From 7cfd05ee063cb97470b9c75eb3b03281668fd5d0 Mon Sep 17 00:00:00 2001 From: Gar Date: Wed, 20 Nov 2024 09:07:23 -0800 Subject: [PATCH 01/16] deps: promise-spawn@8.0.2 --- node_modules/@npmcli/promise-spawn/lib/index.js | 16 ++++++++++++++-- node_modules/@npmcli/promise-spawn/package.json | 6 +++--- package-lock.json | 8 ++++---- package.json | 2 +- 4 files changed, 22 insertions(+), 10 deletions(-) diff --git a/node_modules/@npmcli/promise-spawn/lib/index.js b/node_modules/@npmcli/promise-spawn/lib/index.js index e147cb8f9c746..aa7b55d8f038d 100644 --- a/node_modules/@npmcli/promise-spawn/lib/index.js +++ b/node_modules/@npmcli/promise-spawn/lib/index.js @@ -131,9 +131,19 @@ const open = (_args, opts = {}, extra = {}) => { let platform = process.platform // process.platform === 'linux' may actually indicate WSL, if that's the case - // we want to treat things as win32 anyway so the host can open the argument + // open the argument with sensible-browser which is pre-installed + // In WSL, set the default browser using, for example, + // export BROWSER="/mnt/c/Program Files (x86)/Google/Chrome/Application/chrome.exe" + // or + // export BROWSER="/mnt/c/Program Files (x86)/Microsoft/Edge/Application/msedge.exe" + // To permanently set the default browser, add the appropriate entry to your shell's + // RC file, e.g. .bashrc or .zshrc. if (platform === 'linux' && os.release().toLowerCase().includes('microsoft')) { - platform = 'win32' + platform = 'wsl' + if (!process.env.BROWSER) { + return Promise.reject( + new Error('Set the BROWSER environment variable to your desired browser.')) + } } let command = options.command @@ -146,6 +156,8 @@ const open = (_args, opts = {}, extra = {}) => { // accidentally interpret the first arg as the title, we stick an empty // string immediately after the start command command = 'start ""' + } else if (platform === 'wsl') { + command = 'sensible-browser' } else if (platform === 'darwin') { command = 'open' } else { diff --git a/node_modules/@npmcli/promise-spawn/package.json b/node_modules/@npmcli/promise-spawn/package.json index 9914063f85156..f5fb026be50e8 100644 --- a/node_modules/@npmcli/promise-spawn/package.json +++ b/node_modules/@npmcli/promise-spawn/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/promise-spawn", - "version": "8.0.1", + "version": "8.0.2", "files": [ "bin/", "lib/" @@ -33,7 +33,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^5.0.0", - "@npmcli/template-oss": "4.23.3", + "@npmcli/template-oss": "4.23.4", "spawk": "^1.7.1", "tap": "^16.0.1" }, @@ -42,7 +42,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.23.3", + "version": "4.23.4", "publish": true }, "dependencies": { diff --git a/package-lock.json b/package-lock.json index 9af770d60921e..654942f366671 100644 --- a/package-lock.json +++ b/package-lock.json @@ -92,7 +92,7 @@ "@npmcli/fs": "^4.0.0", "@npmcli/map-workspaces": "^4.0.1", "@npmcli/package-json": "^6.0.1", - "@npmcli/promise-spawn": "^8.0.1", + "@npmcli/promise-spawn": "^8.0.2", "@npmcli/redact": "^3.0.0", "@npmcli/run-script": "^9.0.1", "@sigstore/tuf": "^2.3.4", @@ -2470,9 +2470,9 @@ } }, "node_modules/@npmcli/promise-spawn": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-8.0.1.tgz", - "integrity": "sha512-ZscqKtJqy7oj6MgXEJcHQ1om4utU0Q84QtC28UVuiO6ALSO9sDPanXdu6Wd1oYhItW8fx2u96zRFUE8BuPlAjA==", + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-8.0.2.tgz", + "integrity": "sha512-/bNJhjc+o6qL+Dwz/bqfTQClkEO5nTQ1ZEcdCkAQjhkZMHIh22LPG7fNh1enJP1NKWDqYiiABnjFCY7E0zHYtQ==", "inBundle": true, "license": "ISC", "dependencies": { diff --git a/package.json b/package.json index c92578506b30a..f87e20bb6888e 100644 --- a/package.json +++ b/package.json @@ -57,7 +57,7 @@ "@npmcli/fs": "^4.0.0", "@npmcli/map-workspaces": "^4.0.1", "@npmcli/package-json": "^6.0.1", - "@npmcli/promise-spawn": "^8.0.1", + "@npmcli/promise-spawn": "^8.0.2", "@npmcli/redact": "^3.0.0", "@npmcli/run-script": "^9.0.1", "@sigstore/tuf": "^2.3.4", From 9827c53090e27e77ed46f0cf93a1159412dff112 Mon Sep 17 00:00:00 2001 From: Gar Date: Wed, 20 Nov 2024 09:07:57 -0800 Subject: [PATCH 02/16] deps: ci-info@4.1.0 --- node_modules/ci-info/index.js | 80 ++++++++++++++++++------------- node_modules/ci-info/package.json | 28 ++++++----- node_modules/ci-info/vendors.json | 16 ++++++- package-lock.json | 8 ++-- package.json | 2 +- 5 files changed, 82 insertions(+), 52 deletions(-) diff --git a/node_modules/ci-info/index.js b/node_modules/ci-info/index.js index 47907264581eb..9eba6940c4147 100644 --- a/node_modules/ci-info/index.js +++ b/node_modules/ci-info/index.js @@ -13,6 +13,7 @@ Object.defineProperty(exports, '_vendors', { exports.name = null exports.isPR = null +exports.id = null vendors.forEach(function (vendor) { const envs = Array.isArray(vendor.env) ? 
vendor.env : [vendor.env] @@ -27,45 +28,23 @@ vendors.forEach(function (vendor) { } exports.name = vendor.name - - switch (typeof vendor.pr) { - case 'string': - // "pr": "CIRRUS_PR" - exports.isPR = !!env[vendor.pr] - break - case 'object': - if ('env' in vendor.pr) { - // "pr": { "env": "BUILDKITE_PULL_REQUEST", "ne": "false" } - exports.isPR = vendor.pr.env in env && env[vendor.pr.env] !== vendor.pr.ne - } else if ('any' in vendor.pr) { - // "pr": { "any": ["ghprbPullId", "CHANGE_ID"] } - exports.isPR = vendor.pr.any.some(function (key) { - return !!env[key] - }) - } else { - // "pr": { "DRONE_BUILD_EVENT": "pull_request" } - exports.isPR = checkEnv(vendor.pr) - } - break - default: - // PR detection not supported for this vendor - exports.isPR = null - } + exports.isPR = checkPR(vendor) + exports.id = vendor.constant }) exports.isCI = !!( env.CI !== 'false' && // Bypass all checks if CI env is explicitly set to 'false' (env.BUILD_ID || // Jenkins, Cloudbees - env.BUILD_NUMBER || // Jenkins, TeamCity - env.CI || // Travis CI, CircleCI, Cirrus CI, Gitlab CI, Appveyor, CodeShip, dsari - env.CI_APP_ID || // Appflow - env.CI_BUILD_ID || // Appflow - env.CI_BUILD_NUMBER || // Appflow - env.CI_NAME || // Codeship and others - env.CONTINUOUS_INTEGRATION || // Travis CI, Cirrus CI - env.RUN_ID || // TaskCluster, dsari - exports.name || - false) + env.BUILD_NUMBER || // Jenkins, TeamCity + env.CI || // Travis CI, CircleCI, Cirrus CI, Gitlab CI, Appveyor, CodeShip, dsari + env.CI_APP_ID || // Appflow + env.CI_BUILD_ID || // Appflow + env.CI_BUILD_NUMBER || // Appflow + env.CI_NAME || // Codeship and others + env.CONTINUOUS_INTEGRATION || // Travis CI, Cirrus CI + env.RUN_ID || // TaskCluster, dsari + exports.name || + false) ) function checkEnv (obj) { @@ -79,12 +58,45 @@ function checkEnv (obj) { return env[obj.env] && env[obj.env].includes(obj.includes) // } } + if ('any' in obj) { return obj.any.some(function (k) { return !!env[k] }) } + return Object.keys(obj).every(function (k) { return env[k] === obj[k] }) } + +function checkPR (vendor) { + switch (typeof vendor.pr) { + case 'string': + // "pr": "CIRRUS_PR" + return !!env[vendor.pr] + case 'object': + if ('env' in vendor.pr) { + if ('any' in vendor.pr) { + // "pr": { "env": "CODEBUILD_WEBHOOK_EVENT", "any": ["PULL_REQUEST_CREATED", "PULL_REQUEST_UPDATED"] } + return vendor.pr.any.some(function (key) { + return env[vendor.pr.env] === key + }) + } else { + // "pr": { "env": "BUILDKITE_PULL_REQUEST", "ne": "false" } + return vendor.pr.env in env && env[vendor.pr.env] !== vendor.pr.ne + } + } else if ('any' in vendor.pr) { + // "pr": { "any": ["ghprbPullId", "CHANGE_ID"] } + return vendor.pr.any.some(function (key) { + return !!env[key] + }) + } else { + // "pr": { "DRONE_BUILD_EVENT": "pull_request" } + return checkEnv(vendor.pr) + } + default: + // PR detection not supported for this vendor + return null + } +} diff --git a/node_modules/ci-info/package.json b/node_modules/ci-info/package.json index 3c6b9e4adac8e..156329d2ce379 100644 --- a/node_modules/ci-info/package.json +++ b/node_modules/ci-info/package.json @@ -1,6 +1,6 @@ { "name": "ci-info", - "version": "4.0.0", + "version": "4.1.0", "description": "Get details about the current Continuous Integration environment", "main": "index.js", "typings": "index.d.ts", @@ -9,6 +9,18 @@ "repository": "https://github.com/watson/ci-info.git", "bugs": "https://github.com/watson/ci-info/issues", "homepage": "https://github.com/watson/ci-info", + "contributors": [ + { + "name": "Sibiraj", + "url": 
"https://github.com/sibiraj-s" + } + ], + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], "keywords": [ "ci", "continuous", @@ -22,22 +34,16 @@ "index.d.ts", "CHANGELOG.md" ], - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/sibiraj-s" - } - ], "scripts": { "lint:fix": "standard --fix", "test": "standard && node test.js", - "prepare": "husky install" + "prepare": "husky install || true" }, "devDependencies": { "clear-module": "^4.1.2", - "husky": "^8.0.3", - "standard": "^17.1.0", - "tape": "^5.7.0" + "husky": "^9.1.6", + "standard": "^17.1.2", + "tape": "^5.9.0" }, "engines": { "node": ">=8" diff --git a/node_modules/ci-info/vendors.json b/node_modules/ci-info/vendors.json index 6b65e3f9b541f..64d5924d1a557 100644 --- a/node_modules/ci-info/vendors.json +++ b/node_modules/ci-info/vendors.json @@ -8,7 +8,11 @@ { "name": "Appcircle", "constant": "APPCIRCLE", - "env": "AC_APPCIRCLE" + "env": "AC_APPCIRCLE", + "pr": { + "env": "AC_GIT_PR", + "ne": "false" + } }, { "name": "AppVeyor", @@ -19,7 +23,15 @@ { "name": "AWS CodeBuild", "constant": "CODEBUILD", - "env": "CODEBUILD_BUILD_ARN" + "env": "CODEBUILD_BUILD_ARN", + "pr": { + "env": "CODEBUILD_WEBHOOK_EVENT", + "any": [ + "PULL_REQUEST_CREATED", + "PULL_REQUEST_UPDATED", + "PULL_REQUEST_REOPENED" + ] + } }, { "name": "Azure Pipelines", diff --git a/package-lock.json b/package-lock.json index 654942f366671..49d69fb057436 100644 --- a/package-lock.json +++ b/package-lock.json @@ -100,7 +100,7 @@ "archy": "~1.0.0", "cacache": "^19.0.1", "chalk": "^5.3.0", - "ci-info": "^4.0.0", + "ci-info": "^4.1.0", "cli-columns": "^4.0.0", "fastest-levenshtein": "^1.0.16", "fs-minipass": "^3.0.3", @@ -4856,9 +4856,9 @@ } }, "node_modules/ci-info": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.0.0.tgz", - "integrity": "sha512-TdHqgGf9odd8SXNuxtUBVx8Nv+qZOejE6qyqiy5NtbYYQOeFa6zmHkxlPzmaLxWWHsU6nJmB7AETdVPi+2NBUg==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.1.0.tgz", + "integrity": "sha512-HutrvTNsF48wnxkzERIXOe5/mlcfFcbfCmwcg6CJnizbSue78AbDt+1cgl26zwn61WFxhcPykPfZrbqjGmBb4A==", "funding": [ { "type": "github", diff --git a/package.json b/package.json index f87e20bb6888e..432b53c02e7ce 100644 --- a/package.json +++ b/package.json @@ -65,7 +65,7 @@ "archy": "~1.0.0", "cacache": "^19.0.1", "chalk": "^5.3.0", - "ci-info": "^4.0.0", + "ci-info": "^4.1.0", "cli-columns": "^4.0.0", "fastest-levenshtein": "^1.0.16", "fs-minipass": "^3.0.3", From 8566f15f1ee77e5ca12c653aac20a6c9a21dece9 Mon Sep 17 00:00:00 2001 From: Gar Date: Wed, 20 Nov 2024 09:08:22 -0800 Subject: [PATCH 03/16] deps: make-fetch-happen@14.0.3 --- node_modules/.gitignore | 3 + node_modules/make-fetch-happen/lib/options.js | 7 +- node_modules/make-fetch-happen/lib/remote.js | 3 +- .../node_modules/negotiator/HISTORY.md | 114 +++++++ .../node_modules/negotiator/LICENSE | 24 ++ .../node_modules/negotiator/index.js | 83 +++++ .../node_modules/negotiator/lib/charset.js | 169 ++++++++++ .../node_modules/negotiator/lib/encoding.js | 205 ++++++++++++ .../node_modules/negotiator/lib/language.js | 179 +++++++++++ .../node_modules/negotiator/lib/mediaType.js | 294 ++++++++++++++++++ .../node_modules/negotiator/package.json | 43 +++ node_modules/make-fetch-happen/package.json | 8 +- package-lock.json | 20 +- package.json | 2 +- 14 files changed, 1142 insertions(+), 12 deletions(-) create mode 100644 
node_modules/make-fetch-happen/node_modules/negotiator/HISTORY.md create mode 100644 node_modules/make-fetch-happen/node_modules/negotiator/LICENSE create mode 100644 node_modules/make-fetch-happen/node_modules/negotiator/index.js create mode 100644 node_modules/make-fetch-happen/node_modules/negotiator/lib/charset.js create mode 100644 node_modules/make-fetch-happen/node_modules/negotiator/lib/encoding.js create mode 100644 node_modules/make-fetch-happen/node_modules/negotiator/lib/language.js create mode 100644 node_modules/make-fetch-happen/node_modules/negotiator/lib/mediaType.js create mode 100644 node_modules/make-fetch-happen/node_modules/negotiator/package.json diff --git a/node_modules/.gitignore b/node_modules/.gitignore index 2f1ad728a5e71..85d675d1bd3b4 100644 --- a/node_modules/.gitignore +++ b/node_modules/.gitignore @@ -133,6 +133,9 @@ !/just-diff !/lru-cache !/make-fetch-happen +!/make-fetch-happen/node_modules/ +/make-fetch-happen/node_modules/* +!/make-fetch-happen/node_modules/negotiator !/minimatch !/minipass-collect !/minipass-fetch diff --git a/node_modules/make-fetch-happen/lib/options.js b/node_modules/make-fetch-happen/lib/options.js index f77511279f831..db51cc6324817 100644 --- a/node_modules/make-fetch-happen/lib/options.js +++ b/node_modules/make-fetch-happen/lib/options.js @@ -11,7 +11,12 @@ const conditionalHeaders = [ const configureOptions = (opts) => { const { strictSSL, ...options } = { ...opts } options.method = options.method ? options.method.toUpperCase() : 'GET' - options.rejectUnauthorized = strictSSL !== false + + if (strictSSL === undefined || strictSSL === null) { + options.rejectUnauthorized = process.env.NODE_TLS_REJECT_UNAUTHORIZED !== '0' + } else { + options.rejectUnauthorized = strictSSL !== false + } if (!options.retry) { options.retry = { retries: 0 } diff --git a/node_modules/make-fetch-happen/lib/remote.js b/node_modules/make-fetch-happen/lib/remote.js index 8554564074de6..1d640e5380baa 100644 --- a/node_modules/make-fetch-happen/lib/remote.js +++ b/node_modules/make-fetch-happen/lib/remote.js @@ -35,7 +35,8 @@ const RETRY_TYPES = [ // following redirects (through the cache if necessary) // and verifying response integrity const remoteFetch = (request, options) => { - const agent = getAgent(request.url, options) + // options.signal is intended for the fetch itself, not the agent. Attaching it to the agent will re-use that signal across multiple requests, which prevents any connections beyond the first one. + const agent = getAgent(request.url, { ...options, signal: undefined }) if (!request.headers.has('connection')) { request.headers.set('connection', agent ? 
'keep-alive' : 'close') } diff --git a/node_modules/make-fetch-happen/node_modules/negotiator/HISTORY.md b/node_modules/make-fetch-happen/node_modules/negotiator/HISTORY.md new file mode 100644 index 0000000000000..63d537d3f6811 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/negotiator/HISTORY.md @@ -0,0 +1,114 @@ +1.0.0 / 2024-08-31 +================== + + * Drop support for node <18 + * Added an option preferred encodings array #59 + +0.6.3 / 2022-01-22 +================== + + * Revert "Lazy-load modules from main entry point" + +0.6.2 / 2019-04-29 +================== + + * Fix sorting charset, encoding, and language with extra parameters + +0.6.1 / 2016-05-02 +================== + + * perf: improve `Accept` parsing speed + * perf: improve `Accept-Charset` parsing speed + * perf: improve `Accept-Encoding` parsing speed + * perf: improve `Accept-Language` parsing speed + +0.6.0 / 2015-09-29 +================== + + * Fix including type extensions in parameters in `Accept` parsing + * Fix parsing `Accept` parameters with quoted equals + * Fix parsing `Accept` parameters with quoted semicolons + * Lazy-load modules from main entry point + * perf: delay type concatenation until needed + * perf: enable strict mode + * perf: hoist regular expressions + * perf: remove closures getting spec properties + * perf: remove a closure from media type parsing + * perf: remove property delete from media type parsing + +0.5.3 / 2015-05-10 +================== + + * Fix media type parameter matching to be case-insensitive + +0.5.2 / 2015-05-06 +================== + + * Fix comparing media types with quoted values + * Fix splitting media types with quoted commas + +0.5.1 / 2015-02-14 +================== + + * Fix preference sorting to be stable for long acceptable lists + +0.5.0 / 2014-12-18 +================== + + * Fix list return order when large accepted list + * Fix missing identity encoding when q=0 exists + * Remove dynamic building of Negotiator class + +0.4.9 / 2014-10-14 +================== + + * Fix error when media type has invalid parameter + +0.4.8 / 2014-09-28 +================== + + * Fix all negotiations to be case-insensitive + * Stable sort preferences of same quality according to client order + * Support Node.js 0.6 + +0.4.7 / 2014-06-24 +================== + + * Handle invalid provided languages + * Handle invalid provided media types + +0.4.6 / 2014-06-11 +================== + + * Order by specificity when quality is the same + +0.4.5 / 2014-05-29 +================== + + * Fix regression in empty header handling + +0.4.4 / 2014-05-29 +================== + + * Fix behaviors when headers are not present + +0.4.3 / 2014-04-16 +================== + + * Handle slashes on media params correctly + +0.4.2 / 2014-02-28 +================== + + * Fix media type sorting + * Handle media types params strictly + +0.4.1 / 2014-01-16 +================== + + * Use most specific matches + +0.4.0 / 2014-01-09 +================== + + * Remove preferred prefix from methods diff --git a/node_modules/make-fetch-happen/node_modules/negotiator/LICENSE b/node_modules/make-fetch-happen/node_modules/negotiator/LICENSE new file mode 100644 index 0000000000000..ea6b9e2e9ac25 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/negotiator/LICENSE @@ -0,0 +1,24 @@ +(The MIT License) + +Copyright (c) 2012-2014 Federico Romero +Copyright (c) 2012-2014 Isaac Z. 
Schlueter +Copyright (c) 2014-2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/make-fetch-happen/node_modules/negotiator/index.js b/node_modules/make-fetch-happen/node_modules/negotiator/index.js new file mode 100644 index 0000000000000..4f51315d6af4b --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/negotiator/index.js @@ -0,0 +1,83 @@ +/*! + * negotiator + * Copyright(c) 2012 Federico Romero + * Copyright(c) 2012-2014 Isaac Z. Schlueter + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +var preferredCharsets = require('./lib/charset') +var preferredEncodings = require('./lib/encoding') +var preferredLanguages = require('./lib/language') +var preferredMediaTypes = require('./lib/mediaType') + +/** + * Module exports. + * @public + */ + +module.exports = Negotiator; +module.exports.Negotiator = Negotiator; + +/** + * Create a Negotiator instance from a request. 
+ * @param {object} request + * @public + */ + +function Negotiator(request) { + if (!(this instanceof Negotiator)) { + return new Negotiator(request); + } + + this.request = request; +} + +Negotiator.prototype.charset = function charset(available) { + var set = this.charsets(available); + return set && set[0]; +}; + +Negotiator.prototype.charsets = function charsets(available) { + return preferredCharsets(this.request.headers['accept-charset'], available); +}; + +Negotiator.prototype.encoding = function encoding(available, opts) { + var set = this.encodings(available, opts); + return set && set[0]; +}; + +Negotiator.prototype.encodings = function encodings(available, options) { + var opts = options || {}; + return preferredEncodings(this.request.headers['accept-encoding'], available, opts.preferred); +}; + +Negotiator.prototype.language = function language(available) { + var set = this.languages(available); + return set && set[0]; +}; + +Negotiator.prototype.languages = function languages(available) { + return preferredLanguages(this.request.headers['accept-language'], available); +}; + +Negotiator.prototype.mediaType = function mediaType(available) { + var set = this.mediaTypes(available); + return set && set[0]; +}; + +Negotiator.prototype.mediaTypes = function mediaTypes(available) { + return preferredMediaTypes(this.request.headers.accept, available); +}; + +// Backwards compatibility +Negotiator.prototype.preferredCharset = Negotiator.prototype.charset; +Negotiator.prototype.preferredCharsets = Negotiator.prototype.charsets; +Negotiator.prototype.preferredEncoding = Negotiator.prototype.encoding; +Negotiator.prototype.preferredEncodings = Negotiator.prototype.encodings; +Negotiator.prototype.preferredLanguage = Negotiator.prototype.language; +Negotiator.prototype.preferredLanguages = Negotiator.prototype.languages; +Negotiator.prototype.preferredMediaType = Negotiator.prototype.mediaType; +Negotiator.prototype.preferredMediaTypes = Negotiator.prototype.mediaTypes; diff --git a/node_modules/make-fetch-happen/node_modules/negotiator/lib/charset.js b/node_modules/make-fetch-happen/node_modules/negotiator/lib/charset.js new file mode 100644 index 0000000000000..cdd014803474a --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/negotiator/lib/charset.js @@ -0,0 +1,169 @@ +/** + * negotiator + * Copyright(c) 2012 Isaac Z. Schlueter + * Copyright(c) 2014 Federico Romero + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Module exports. + * @public + */ + +module.exports = preferredCharsets; +module.exports.preferredCharsets = preferredCharsets; + +/** + * Module variables. + * @private + */ + +var simpleCharsetRegExp = /^\s*([^\s;]+)\s*(?:;(.*))?$/; + +/** + * Parse the Accept-Charset header. + * @private + */ + +function parseAcceptCharset(accept) { + var accepts = accept.split(','); + + for (var i = 0, j = 0; i < accepts.length; i++) { + var charset = parseCharset(accepts[i].trim(), i); + + if (charset) { + accepts[j++] = charset; + } + } + + // trim accepts + accepts.length = j; + + return accepts; +} + +/** + * Parse a charset from the Accept-Charset header. 
+ * @private + */ + +function parseCharset(str, i) { + var match = simpleCharsetRegExp.exec(str); + if (!match) return null; + + var charset = match[1]; + var q = 1; + if (match[2]) { + var params = match[2].split(';') + for (var j = 0; j < params.length; j++) { + var p = params[j].trim().split('='); + if (p[0] === 'q') { + q = parseFloat(p[1]); + break; + } + } + } + + return { + charset: charset, + q: q, + i: i + }; +} + +/** + * Get the priority of a charset. + * @private + */ + +function getCharsetPriority(charset, accepted, index) { + var priority = {o: -1, q: 0, s: 0}; + + for (var i = 0; i < accepted.length; i++) { + var spec = specify(charset, accepted[i], index); + + if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) { + priority = spec; + } + } + + return priority; +} + +/** + * Get the specificity of the charset. + * @private + */ + +function specify(charset, spec, index) { + var s = 0; + if(spec.charset.toLowerCase() === charset.toLowerCase()){ + s |= 1; + } else if (spec.charset !== '*' ) { + return null + } + + return { + i: index, + o: spec.i, + q: spec.q, + s: s + } +} + +/** + * Get the preferred charsets from an Accept-Charset header. + * @public + */ + +function preferredCharsets(accept, provided) { + // RFC 2616 sec 14.2: no header = * + var accepts = parseAcceptCharset(accept === undefined ? '*' : accept || ''); + + if (!provided) { + // sorted list of all charsets + return accepts + .filter(isQuality) + .sort(compareSpecs) + .map(getFullCharset); + } + + var priorities = provided.map(function getPriority(type, index) { + return getCharsetPriority(type, accepts, index); + }); + + // sorted list of accepted charsets + return priorities.filter(isQuality).sort(compareSpecs).map(function getCharset(priority) { + return provided[priorities.indexOf(priority)]; + }); +} + +/** + * Compare two specs. + * @private + */ + +function compareSpecs(a, b) { + return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0; +} + +/** + * Get full charset string. + * @private + */ + +function getFullCharset(spec) { + return spec.charset; +} + +/** + * Check if a spec has any quality. + * @private + */ + +function isQuality(spec) { + return spec.q > 0; +} diff --git a/node_modules/make-fetch-happen/node_modules/negotiator/lib/encoding.js b/node_modules/make-fetch-happen/node_modules/negotiator/lib/encoding.js new file mode 100644 index 0000000000000..9ebb633d67743 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/negotiator/lib/encoding.js @@ -0,0 +1,205 @@ +/** + * negotiator + * Copyright(c) 2012 Isaac Z. Schlueter + * Copyright(c) 2014 Federico Romero + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Module exports. + * @public + */ + +module.exports = preferredEncodings; +module.exports.preferredEncodings = preferredEncodings; + +/** + * Module variables. + * @private + */ + +var simpleEncodingRegExp = /^\s*([^\s;]+)\s*(?:;(.*))?$/; + +/** + * Parse the Accept-Encoding header. 
+ * @private + */ + +function parseAcceptEncoding(accept) { + var accepts = accept.split(','); + var hasIdentity = false; + var minQuality = 1; + + for (var i = 0, j = 0; i < accepts.length; i++) { + var encoding = parseEncoding(accepts[i].trim(), i); + + if (encoding) { + accepts[j++] = encoding; + hasIdentity = hasIdentity || specify('identity', encoding); + minQuality = Math.min(minQuality, encoding.q || 1); + } + } + + if (!hasIdentity) { + /* + * If identity doesn't explicitly appear in the accept-encoding header, + * it's added to the list of acceptable encoding with the lowest q + */ + accepts[j++] = { + encoding: 'identity', + q: minQuality, + i: i + }; + } + + // trim accepts + accepts.length = j; + + return accepts; +} + +/** + * Parse an encoding from the Accept-Encoding header. + * @private + */ + +function parseEncoding(str, i) { + var match = simpleEncodingRegExp.exec(str); + if (!match) return null; + + var encoding = match[1]; + var q = 1; + if (match[2]) { + var params = match[2].split(';'); + for (var j = 0; j < params.length; j++) { + var p = params[j].trim().split('='); + if (p[0] === 'q') { + q = parseFloat(p[1]); + break; + } + } + } + + return { + encoding: encoding, + q: q, + i: i + }; +} + +/** + * Get the priority of an encoding. + * @private + */ + +function getEncodingPriority(encoding, accepted, index) { + var priority = {encoding: encoding, o: -1, q: 0, s: 0}; + + for (var i = 0; i < accepted.length; i++) { + var spec = specify(encoding, accepted[i], index); + + if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) { + priority = spec; + } + } + + return priority; +} + +/** + * Get the specificity of the encoding. + * @private + */ + +function specify(encoding, spec, index) { + var s = 0; + if(spec.encoding.toLowerCase() === encoding.toLowerCase()){ + s |= 1; + } else if (spec.encoding !== '*' ) { + return null + } + + return { + encoding: encoding, + i: index, + o: spec.i, + q: spec.q, + s: s + } +}; + +/** + * Get the preferred encodings from an Accept-Encoding header. + * @public + */ + +function preferredEncodings(accept, provided, preferred) { + var accepts = parseAcceptEncoding(accept || ''); + + var comparator = preferred ? function comparator (a, b) { + if (a.q !== b.q) { + return b.q - a.q // higher quality first + } + + var aPreferred = preferred.indexOf(a.encoding) + var bPreferred = preferred.indexOf(b.encoding) + + if (aPreferred === -1 && bPreferred === -1) { + // consider the original specifity/order + return (b.s - a.s) || (a.o - b.o) || (a.i - b.i) + } + + if (aPreferred !== -1 && bPreferred !== -1) { + return aPreferred - bPreferred // consider the preferred order + } + + return aPreferred === -1 ? 1 : -1 // preferred first + } : compareSpecs; + + if (!provided) { + // sorted list of all encodings + return accepts + .filter(isQuality) + .sort(comparator) + .map(getFullEncoding); + } + + var priorities = provided.map(function getPriority(type, index) { + return getEncodingPriority(type, accepts, index); + }); + + // sorted list of accepted encodings + return priorities.filter(isQuality).sort(comparator).map(function getEncoding(priority) { + return provided[priorities.indexOf(priority)]; + }); +} + +/** + * Compare two specs. + * @private + */ + +function compareSpecs(a, b) { + return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i); +} + +/** + * Get full encoding string. + * @private + */ + +function getFullEncoding(spec) { + return spec.encoding; +} + +/** + * Check if a spec has any quality. 
+ * @private + */ + +function isQuality(spec) { + return spec.q > 0; +} diff --git a/node_modules/make-fetch-happen/node_modules/negotiator/lib/language.js b/node_modules/make-fetch-happen/node_modules/negotiator/lib/language.js new file mode 100644 index 0000000000000..a23167252719b --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/negotiator/lib/language.js @@ -0,0 +1,179 @@ +/** + * negotiator + * Copyright(c) 2012 Isaac Z. Schlueter + * Copyright(c) 2014 Federico Romero + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Module exports. + * @public + */ + +module.exports = preferredLanguages; +module.exports.preferredLanguages = preferredLanguages; + +/** + * Module variables. + * @private + */ + +var simpleLanguageRegExp = /^\s*([^\s\-;]+)(?:-([^\s;]+))?\s*(?:;(.*))?$/; + +/** + * Parse the Accept-Language header. + * @private + */ + +function parseAcceptLanguage(accept) { + var accepts = accept.split(','); + + for (var i = 0, j = 0; i < accepts.length; i++) { + var language = parseLanguage(accepts[i].trim(), i); + + if (language) { + accepts[j++] = language; + } + } + + // trim accepts + accepts.length = j; + + return accepts; +} + +/** + * Parse a language from the Accept-Language header. + * @private + */ + +function parseLanguage(str, i) { + var match = simpleLanguageRegExp.exec(str); + if (!match) return null; + + var prefix = match[1] + var suffix = match[2] + var full = prefix + + if (suffix) full += "-" + suffix; + + var q = 1; + if (match[3]) { + var params = match[3].split(';') + for (var j = 0; j < params.length; j++) { + var p = params[j].split('='); + if (p[0] === 'q') q = parseFloat(p[1]); + } + } + + return { + prefix: prefix, + suffix: suffix, + q: q, + i: i, + full: full + }; +} + +/** + * Get the priority of a language. + * @private + */ + +function getLanguagePriority(language, accepted, index) { + var priority = {o: -1, q: 0, s: 0}; + + for (var i = 0; i < accepted.length; i++) { + var spec = specify(language, accepted[i], index); + + if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) { + priority = spec; + } + } + + return priority; +} + +/** + * Get the specificity of the language. + * @private + */ + +function specify(language, spec, index) { + var p = parseLanguage(language) + if (!p) return null; + var s = 0; + if(spec.full.toLowerCase() === p.full.toLowerCase()){ + s |= 4; + } else if (spec.prefix.toLowerCase() === p.full.toLowerCase()) { + s |= 2; + } else if (spec.full.toLowerCase() === p.prefix.toLowerCase()) { + s |= 1; + } else if (spec.full !== '*' ) { + return null + } + + return { + i: index, + o: spec.i, + q: spec.q, + s: s + } +}; + +/** + * Get the preferred languages from an Accept-Language header. + * @public + */ + +function preferredLanguages(accept, provided) { + // RFC 2616 sec 14.4: no header = * + var accepts = parseAcceptLanguage(accept === undefined ? '*' : accept || ''); + + if (!provided) { + // sorted list of all languages + return accepts + .filter(isQuality) + .sort(compareSpecs) + .map(getFullLanguage); + } + + var priorities = provided.map(function getPriority(type, index) { + return getLanguagePriority(type, accepts, index); + }); + + // sorted list of accepted languages + return priorities.filter(isQuality).sort(compareSpecs).map(function getLanguage(priority) { + return provided[priorities.indexOf(priority)]; + }); +} + +/** + * Compare two specs. 
+ * @private + */ + +function compareSpecs(a, b) { + return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0; +} + +/** + * Get full language string. + * @private + */ + +function getFullLanguage(spec) { + return spec.full; +} + +/** + * Check if a spec has any quality. + * @private + */ + +function isQuality(spec) { + return spec.q > 0; +} diff --git a/node_modules/make-fetch-happen/node_modules/negotiator/lib/mediaType.js b/node_modules/make-fetch-happen/node_modules/negotiator/lib/mediaType.js new file mode 100644 index 0000000000000..8e402ea88394c --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/negotiator/lib/mediaType.js @@ -0,0 +1,294 @@ +/** + * negotiator + * Copyright(c) 2012 Isaac Z. Schlueter + * Copyright(c) 2014 Federico Romero + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Module exports. + * @public + */ + +module.exports = preferredMediaTypes; +module.exports.preferredMediaTypes = preferredMediaTypes; + +/** + * Module variables. + * @private + */ + +var simpleMediaTypeRegExp = /^\s*([^\s\/;]+)\/([^;\s]+)\s*(?:;(.*))?$/; + +/** + * Parse the Accept header. + * @private + */ + +function parseAccept(accept) { + var accepts = splitMediaTypes(accept); + + for (var i = 0, j = 0; i < accepts.length; i++) { + var mediaType = parseMediaType(accepts[i].trim(), i); + + if (mediaType) { + accepts[j++] = mediaType; + } + } + + // trim accepts + accepts.length = j; + + return accepts; +} + +/** + * Parse a media type from the Accept header. + * @private + */ + +function parseMediaType(str, i) { + var match = simpleMediaTypeRegExp.exec(str); + if (!match) return null; + + var params = Object.create(null); + var q = 1; + var subtype = match[2]; + var type = match[1]; + + if (match[3]) { + var kvps = splitParameters(match[3]).map(splitKeyValuePair); + + for (var j = 0; j < kvps.length; j++) { + var pair = kvps[j]; + var key = pair[0].toLowerCase(); + var val = pair[1]; + + // get the value, unwrapping quotes + var value = val && val[0] === '"' && val[val.length - 1] === '"' + ? val.slice(1, -1) + : val; + + if (key === 'q') { + q = parseFloat(value); + break; + } + + // store parameter + params[key] = value; + } + } + + return { + type: type, + subtype: subtype, + params: params, + q: q, + i: i + }; +} + +/** + * Get the priority of a media type. + * @private + */ + +function getMediaTypePriority(type, accepted, index) { + var priority = {o: -1, q: 0, s: 0}; + + for (var i = 0; i < accepted.length; i++) { + var spec = specify(type, accepted[i], index); + + if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) { + priority = spec; + } + } + + return priority; +} + +/** + * Get the specificity of the media type. + * @private + */ + +function specify(type, spec, index) { + var p = parseMediaType(type); + var s = 0; + + if (!p) { + return null; + } + + if(spec.type.toLowerCase() == p.type.toLowerCase()) { + s |= 4 + } else if(spec.type != '*') { + return null; + } + + if(spec.subtype.toLowerCase() == p.subtype.toLowerCase()) { + s |= 2 + } else if(spec.subtype != '*') { + return null; + } + + var keys = Object.keys(spec.params); + if (keys.length > 0) { + if (keys.every(function (k) { + return spec.params[k] == '*' || (spec.params[k] || '').toLowerCase() == (p.params[k] || '').toLowerCase(); + })) { + s |= 1 + } else { + return null + } + } + + return { + i: index, + o: spec.i, + q: spec.q, + s: s, + } +} + +/** + * Get the preferred media types from an Accept header. 
+ * @public + */ + +function preferredMediaTypes(accept, provided) { + // RFC 2616 sec 14.2: no header = */* + var accepts = parseAccept(accept === undefined ? '*/*' : accept || ''); + + if (!provided) { + // sorted list of all types + return accepts + .filter(isQuality) + .sort(compareSpecs) + .map(getFullType); + } + + var priorities = provided.map(function getPriority(type, index) { + return getMediaTypePriority(type, accepts, index); + }); + + // sorted list of accepted types + return priorities.filter(isQuality).sort(compareSpecs).map(function getType(priority) { + return provided[priorities.indexOf(priority)]; + }); +} + +/** + * Compare two specs. + * @private + */ + +function compareSpecs(a, b) { + return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0; +} + +/** + * Get full type string. + * @private + */ + +function getFullType(spec) { + return spec.type + '/' + spec.subtype; +} + +/** + * Check if a spec has any quality. + * @private + */ + +function isQuality(spec) { + return spec.q > 0; +} + +/** + * Count the number of quotes in a string. + * @private + */ + +function quoteCount(string) { + var count = 0; + var index = 0; + + while ((index = string.indexOf('"', index)) !== -1) { + count++; + index++; + } + + return count; +} + +/** + * Split a key value pair. + * @private + */ + +function splitKeyValuePair(str) { + var index = str.indexOf('='); + var key; + var val; + + if (index === -1) { + key = str; + } else { + key = str.slice(0, index); + val = str.slice(index + 1); + } + + return [key, val]; +} + +/** + * Split an Accept header into media types. + * @private + */ + +function splitMediaTypes(accept) { + var accepts = accept.split(','); + + for (var i = 1, j = 0; i < accepts.length; i++) { + if (quoteCount(accepts[j]) % 2 == 0) { + accepts[++j] = accepts[i]; + } else { + accepts[j] += ',' + accepts[i]; + } + } + + // trim accepts + accepts.length = j + 1; + + return accepts; +} + +/** + * Split a string of parameters. + * @private + */ + +function splitParameters(str) { + var parameters = str.split(';'); + + for (var i = 1, j = 0; i < parameters.length; i++) { + if (quoteCount(parameters[j]) % 2 == 0) { + parameters[++j] = parameters[i]; + } else { + parameters[j] += ';' + parameters[i]; + } + } + + // trim parameters + parameters.length = j + 1; + + for (var i = 0; i < parameters.length; i++) { + parameters[i] = parameters[i].trim(); + } + + return parameters; +} diff --git a/node_modules/make-fetch-happen/node_modules/negotiator/package.json b/node_modules/make-fetch-happen/node_modules/negotiator/package.json new file mode 100644 index 0000000000000..e4bdc1ef4f748 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/negotiator/package.json @@ -0,0 +1,43 @@ +{ + "name": "negotiator", + "description": "HTTP content negotiation", + "version": "1.0.0", + "contributors": [ + "Douglas Christopher Wilson ", + "Federico Romero ", + "Isaac Z. 
Schlueter (http://blog.izs.me/)" + ], + "license": "MIT", + "keywords": [ + "http", + "content negotiation", + "accept", + "accept-language", + "accept-encoding", + "accept-charset" + ], + "repository": "jshttp/negotiator", + "devDependencies": { + "eslint": "7.32.0", + "eslint-plugin-markdown": "2.2.1", + "mocha": "9.1.3", + "nyc": "15.1.0" + }, + "files": [ + "lib/", + "HISTORY.md", + "LICENSE", + "index.js", + "README.md" + ], + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "lint": "eslint .", + "test": "mocha --reporter spec --check-leaks --bail test/", + "test:debug": "mocha --reporter spec --check-leaks --inspect --inspect-brk test/", + "test-ci": "nyc --reporter=lcov --reporter=text npm test", + "test-cov": "nyc --reporter=html --reporter=text npm test" + } +} diff --git a/node_modules/make-fetch-happen/package.json b/node_modules/make-fetch-happen/package.json index 0868ff6d7efa5..054fe841f13b7 100644 --- a/node_modules/make-fetch-happen/package.json +++ b/node_modules/make-fetch-happen/package.json @@ -1,6 +1,6 @@ { "name": "make-fetch-happen", - "version": "14.0.1", + "version": "14.0.3", "description": "Opinionated, caching, retrying fetch client", "main": "lib/index.js", "files": [ @@ -40,14 +40,14 @@ "minipass-fetch": "^4.0.0", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", - "negotiator": "^0.6.3", + "negotiator": "^1.0.0", "proc-log": "^5.0.0", "promise-retry": "^2.0.1", "ssri": "^12.0.0" }, "devDependencies": { "@npmcli/eslint-config": "^5.0.0", - "@npmcli/template-oss": "4.23.3", + "@npmcli/template-oss": "4.23.4", "nock": "^13.2.4", "safe-buffer": "^5.2.1", "standard-version": "^9.3.2", @@ -68,7 +68,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.23.3", + "version": "4.23.4", "publish": "true" } } diff --git a/package-lock.json b/package-lock.json index 49d69fb057436..3587ca05d8699 100644 --- a/package-lock.json +++ b/package-lock.json @@ -122,7 +122,7 @@ "libnpmsearch": "^8.0.0", "libnpmteam": "^7.0.0", "libnpmversion": "^7.0.0", - "make-fetch-happen": "^14.0.1", + "make-fetch-happen": "^14.0.3", "minimatch": "^9.0.5", "minipass": "^7.1.1", "minipass-pipeline": "^1.2.4", @@ -9609,9 +9609,9 @@ } }, "node_modules/make-fetch-happen": { - "version": "14.0.1", - "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-14.0.1.tgz", - "integrity": "sha512-Z1ndm71UQdcK362F5Wg4IFRBZq4MGeCz+uor5iPROkSjEWEoc1Zn7OSKPvmg01S9XOI8mr+GlRr+W4ABz4ZgdA==", + "version": "14.0.3", + "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-14.0.3.tgz", + "integrity": "sha512-QMjGbFTP0blj97EeidG5hk/QhKQ3T4ICckQGLgz38QF7Vgbk6e6FTARN8KhKxyBbWn8R0HU+bnw8aSoFPD4qtQ==", "inBundle": true, "license": "ISC", "dependencies": { @@ -9622,7 +9622,7 @@ "minipass-fetch": "^4.0.0", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", - "negotiator": "^0.6.3", + "negotiator": "^1.0.0", "proc-log": "^5.0.0", "promise-retry": "^2.0.1", "ssri": "^12.0.0" @@ -9631,6 +9631,16 @@ "node": "^18.17.0 || >=20.5.0" } }, + "node_modules/make-fetch-happen/node_modules/negotiator": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz", + "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, "node_modules/map-obj": { "version": "4.3.0", "resolved": 
"https://registry.npmjs.org/map-obj/-/map-obj-4.3.0.tgz", diff --git a/package.json b/package.json index 432b53c02e7ce..dbd720e08cf5d 100644 --- a/package.json +++ b/package.json @@ -87,7 +87,7 @@ "libnpmsearch": "^8.0.0", "libnpmteam": "^7.0.0", "libnpmversion": "^7.0.0", - "make-fetch-happen": "^14.0.1", + "make-fetch-happen": "^14.0.3", "minimatch": "^9.0.5", "minipass": "^7.1.1", "minipass-pipeline": "^1.2.4", From 092935972e6f475addde0296a41e67612d40ac31 Mon Sep 17 00:00:00 2001 From: Gar Date: Wed, 20 Nov 2024 09:08:51 -0800 Subject: [PATCH 04/16] deps: npm-registry-fetch@18.0.2 --- .../npm-registry-fetch/lib/check-response.js | 16 ++++++++++++---- node_modules/npm-registry-fetch/package.json | 8 ++++---- package-lock.json | 8 ++++---- package.json | 2 +- 4 files changed, 21 insertions(+), 13 deletions(-) diff --git a/node_modules/npm-registry-fetch/lib/check-response.js b/node_modules/npm-registry-fetch/lib/check-response.js index 65eea2963b0b4..2f183082ab2ce 100644 --- a/node_modules/npm-registry-fetch/lib/check-response.js +++ b/node_modules/npm-registry-fetch/lib/check-response.js @@ -48,10 +48,18 @@ function logRequest (method, res, startTime) { const cacheStr = cacheStatus ? ` (cache ${cacheStatus})` : '' const urlStr = cleanUrl(res.url) - log.http( - 'fetch', - `${method.toUpperCase()} ${res.status} ${urlStr} ${elapsedTime}ms${attemptStr}${cacheStr}` - ) + // If make-fetch-happen reports a cache hit, then there was no fetch + if (cacheStatus === 'hit') { + log.http( + 'cache', + `${urlStr} ${elapsedTime}ms${attemptStr}${cacheStr}` + ) + } else { + log.http( + 'fetch', + `${method.toUpperCase()} ${res.status} ${urlStr} ${elapsedTime}ms${attemptStr}${cacheStr}` + ) + } } function checkErrors (method, res, startTime, opts) { diff --git a/node_modules/npm-registry-fetch/package.json b/node_modules/npm-registry-fetch/package.json index 559473b964aaa..bd7a79d35e26a 100644 --- a/node_modules/npm-registry-fetch/package.json +++ b/node_modules/npm-registry-fetch/package.json @@ -1,6 +1,6 @@ { "name": "npm-registry-fetch", - "version": "18.0.1", + "version": "18.0.2", "description": "Fetch-based http client for use with npm registry APIs", "main": "lib", "files": [ @@ -42,8 +42,8 @@ }, "devDependencies": { "@npmcli/eslint-config": "^5.0.0", - "@npmcli/template-oss": "4.23.3", - "cacache": "^18.0.0", + "@npmcli/template-oss": "4.23.4", + "cacache": "^19.0.1", "nock": "^13.2.4", "require-inject": "^1.4.4", "ssri": "^12.0.0", @@ -62,7 +62,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.23.3", + "version": "4.23.4", "publish": "true" } } diff --git a/package-lock.json b/package-lock.json index 3587ca05d8699..6d84e53ec8e9e 100644 --- a/package-lock.json +++ b/package-lock.json @@ -135,7 +135,7 @@ "npm-package-arg": "^12.0.0", "npm-pick-manifest": "^10.0.0", "npm-profile": "^11.0.1", - "npm-registry-fetch": "^18.0.1", + "npm-registry-fetch": "^18.0.2", "npm-user-validate": "^3.0.0", "p-map": "^4.0.0", "pacote": "^19.0.0", @@ -11454,9 +11454,9 @@ } }, "node_modules/npm-registry-fetch": { - "version": "18.0.1", - "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-18.0.1.tgz", - "integrity": "sha512-5XKlWmVtfTTmnU6rKBjjQDMdnFOVAH9t7D4DG1ZcsIDwkGYBTUl0fMnbzsVSM0t/HZRpyE1VMLZv9O0Bvkj3UA==", + "version": "18.0.2", + "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-18.0.2.tgz", + "integrity": "sha512-LeVMZBBVy+oQb5R6FDV9OlJCcWDU+al10oKpe+nsvcHnG24Z3uM3SvJYKfGJlfGjVU8v9liejCrUR/M5HO5NEQ==", "inBundle": true, "license": "ISC", "dependencies": { diff --git a/package.json b/package.json index dbd720e08cf5d..42484fce46f5a 100644 --- a/package.json +++ b/package.json @@ -100,7 +100,7 @@ "npm-package-arg": "^12.0.0", "npm-pick-manifest": "^10.0.0", "npm-profile": "^11.0.1", - "npm-registry-fetch": "^18.0.1", + "npm-registry-fetch": "^18.0.2", "npm-user-validate": "^3.0.0", "p-map": "^4.0.0", "pacote": "^19.0.0", From 49e1ec4c079b83ce2989860608c028ebeda1b7ba Mon Sep 17 00:00:00 2001 From: Gar Date: Wed, 20 Nov 2024 09:09:33 -0800 Subject: [PATCH 05/16] deps: pacote@19.0.1 --- node_modules/.gitignore | 14 + node_modules/pacote/lib/dir.js | 2 + node_modules/pacote/lib/fetcher.js | 10 +- .../node_modules/@sigstore/bundle/LICENSE | 202 ++++++++++ .../@sigstore/bundle/dist/build.js | 100 +++++ .../@sigstore/bundle/dist/bundle.js | 24 ++ .../@sigstore/bundle/dist/error.js | 25 ++ .../@sigstore/bundle/dist/index.js | 43 +++ .../@sigstore/bundle/dist/serialized.js | 49 +++ .../@sigstore/bundle/dist/utility.js | 2 + .../@sigstore/bundle/dist/validate.js | 199 ++++++++++ .../@sigstore/bundle/package.json | 35 ++ .../node_modules/@sigstore/core/LICENSE | 202 ++++++++++ .../@sigstore/core/dist/asn1/error.js | 24 ++ .../@sigstore/core/dist/asn1/index.js | 20 + .../@sigstore/core/dist/asn1/length.js | 62 ++++ .../@sigstore/core/dist/asn1/obj.js | 152 ++++++++ .../@sigstore/core/dist/asn1/parse.js | 124 +++++++ .../@sigstore/core/dist/asn1/tag.js | 86 +++++ .../@sigstore/core/dist/crypto.js | 60 +++ .../node_modules/@sigstore/core/dist/dsse.js | 30 ++ .../@sigstore/core/dist/encoding.js | 27 ++ .../node_modules/@sigstore/core/dist/index.js | 56 +++ .../node_modules/@sigstore/core/dist/json.js | 60 +++ .../node_modules/@sigstore/core/dist/oid.js | 14 + .../node_modules/@sigstore/core/dist/pem.js | 43 +++ .../@sigstore/core/dist/rfc3161/error.js | 21 ++ .../@sigstore/core/dist/rfc3161/index.js | 20 + .../@sigstore/core/dist/rfc3161/timestamp.js | 201 ++++++++++ .../@sigstore/core/dist/rfc3161/tstinfo.js | 61 +++ .../@sigstore/core/dist/stream.js | 115 ++++++ .../@sigstore/core/dist/x509/cert.js | 230 ++++++++++++ .../@sigstore/core/dist/x509/ext.js | 145 ++++++++ .../@sigstore/core/dist/x509/index.js | 23 ++ .../@sigstore/core/dist/x509/sct.js | 141 +++++++ .../node_modules/@sigstore/core/package.json | 31 ++ .../node_modules/@sigstore/sign/LICENSE | 202 ++++++++++ .../@sigstore/sign/dist/bundler/base.js | 50 +++ .../@sigstore/sign/dist/bundler/bundle.js | 71 ++++ .../@sigstore/sign/dist/bundler/dsse.js 
| 46 +++ .../@sigstore/sign/dist/bundler/index.js | 7 + .../@sigstore/sign/dist/bundler/message.js | 30 ++ .../node_modules/@sigstore/sign/dist/error.js | 39 ++ .../@sigstore/sign/dist/external/error.js | 26 ++ .../@sigstore/sign/dist/external/fetch.js | 98 +++++ .../@sigstore/sign/dist/external/fulcio.js | 41 ++ .../@sigstore/sign/dist/external/rekor.js | 80 ++++ .../@sigstore/sign/dist/external/tsa.js | 38 ++ .../@sigstore/sign/dist/identity/ci.js | 73 ++++ .../@sigstore/sign/dist/identity/index.js | 20 + .../@sigstore/sign/dist/identity/provider.js | 2 + .../node_modules/@sigstore/sign/dist/index.js | 17 + .../@sigstore/sign/dist/signer/fulcio/ca.js | 59 +++ .../sign/dist/signer/fulcio/ephemeral.js | 45 +++ .../sign/dist/signer/fulcio/index.js | 87 +++++ .../@sigstore/sign/dist/signer/index.js | 22 ++ .../@sigstore/sign/dist/signer/signer.js | 17 + .../@sigstore/sign/dist/types/fetch.js | 2 + .../@sigstore/sign/dist/util/index.js | 49 +++ .../@sigstore/sign/dist/util/oidc.js | 30 ++ .../@sigstore/sign/dist/util/ua.js | 32 ++ .../@sigstore/sign/dist/witness/index.js | 24 ++ .../sign/dist/witness/tlog/client.js | 61 +++ .../@sigstore/sign/dist/witness/tlog/entry.js | 140 +++++++ .../@sigstore/sign/dist/witness/tlog/index.js | 82 ++++ .../@sigstore/sign/dist/witness/tsa/client.js | 46 +++ .../@sigstore/sign/dist/witness/tsa/index.js | 44 +++ .../@sigstore/sign/dist/witness/witness.js | 2 + .../node_modules/@sigstore/sign/package.json | 46 +++ .../pacote/node_modules/@sigstore/tuf/LICENSE | 202 ++++++++++ .../@sigstore/tuf/dist/appdata.js | 43 +++ .../node_modules/@sigstore/tuf/dist/client.js | 111 ++++++ .../node_modules/@sigstore/tuf/dist/error.js | 12 + .../node_modules/@sigstore/tuf/dist/index.js | 56 +++ .../node_modules/@sigstore/tuf/dist/target.js | 79 ++++ .../node_modules/@sigstore/tuf/package.json | 41 ++ .../node_modules/@sigstore/tuf/seeds.json | 1 + .../@sigstore/verify/dist/bundle/dsse.js | 43 +++ .../@sigstore/verify/dist/bundle/index.js | 57 +++ .../@sigstore/verify/dist/bundle/message.js | 36 ++ .../@sigstore/verify/dist/error.js | 32 ++ .../@sigstore/verify/dist/index.js | 28 ++ .../@sigstore/verify/dist/key/certificate.js | 205 ++++++++++ .../@sigstore/verify/dist/key/index.js | 72 ++++ .../@sigstore/verify/dist/key/sct.js | 78 ++++ .../@sigstore/verify/dist/policy.js | 24 ++ .../@sigstore/verify/dist/shared.types.js | 2 + .../verify/dist/timestamp/checkpoint.js | 157 ++++++++ .../@sigstore/verify/dist/timestamp/index.js | 46 +++ .../@sigstore/verify/dist/timestamp/merkle.js | 104 ++++++ .../@sigstore/verify/dist/timestamp/set.js | 60 +++ .../@sigstore/verify/dist/timestamp/tsa.js | 73 ++++ .../@sigstore/verify/dist/tlog/dsse.js | 57 +++ .../verify/dist/tlog/hashedrekord.js | 51 +++ .../@sigstore/verify/dist/tlog/index.js | 47 +++ .../@sigstore/verify/dist/tlog/intoto.js | 62 ++++ .../@sigstore/verify/dist/trust/filter.js | 23 ++ .../@sigstore/verify/dist/trust/index.js | 86 +++++ .../verify/dist/trust/trust.types.js | 2 + .../@sigstore/verify/dist/verifier.js | 141 +++++++ .../@sigstore/verify/package.json | 36 ++ .../pacote/node_modules/@tufjs/models/LICENSE | 21 ++ .../node_modules/@tufjs/models/dist/base.js | 92 +++++ .../@tufjs/models/dist/delegations.js | 115 ++++++ .../node_modules/@tufjs/models/dist/error.js | 27 ++ .../node_modules/@tufjs/models/dist/file.js | 183 +++++++++ .../node_modules/@tufjs/models/dist/index.js | 24 ++ .../node_modules/@tufjs/models/dist/key.js | 85 +++++ .../@tufjs/models/dist/metadata.js | 160 ++++++++ 
.../node_modules/@tufjs/models/dist/role.js | 299 +++++++++++++++ .../node_modules/@tufjs/models/dist/root.js | 116 ++++++ .../@tufjs/models/dist/signature.js | 38 ++ .../@tufjs/models/dist/snapshot.js | 71 ++++ .../@tufjs/models/dist/targets.js | 92 +++++ .../@tufjs/models/dist/timestamp.js | 58 +++ .../@tufjs/models/dist/utils/guard.js | 32 ++ .../@tufjs/models/dist/utils/index.js | 28 ++ .../@tufjs/models/dist/utils/key.js | 142 +++++++ .../@tufjs/models/dist/utils/oid.js | 26 ++ .../@tufjs/models/dist/utils/types.js | 2 + .../@tufjs/models/dist/utils/verify.js | 13 + .../node_modules/@tufjs/models/package.json | 37 ++ .../pacote/node_modules/sigstore/LICENSE | 202 ++++++++++ .../node_modules/sigstore/dist/config.js | 120 ++++++ .../node_modules/sigstore/dist/index.js | 34 ++ .../node_modules/sigstore/dist/sigstore.js | 102 +++++ .../pacote/node_modules/sigstore/package.json | 47 +++ .../pacote/node_modules/tuf-js/LICENSE | 21 ++ .../pacote/node_modules/tuf-js/dist/config.js | 15 + .../pacote/node_modules/tuf-js/dist/error.js | 48 +++ .../node_modules/tuf-js/dist/fetcher.js | 84 +++++ .../pacote/node_modules/tuf-js/dist/index.js | 9 + .../pacote/node_modules/tuf-js/dist/store.js | 208 +++++++++++ .../node_modules/tuf-js/dist/updater.js | 350 ++++++++++++++++++ .../node_modules/tuf-js/dist/utils/tmpfile.js | 25 ++ .../node_modules/tuf-js/dist/utils/url.js | 13 + .../pacote/node_modules/tuf-js/package.json | 43 +++ node_modules/pacote/package.json | 4 +- package-lock.json | 141 ++++++- package.json | 2 +- 140 files changed, 9454 insertions(+), 23 deletions(-) create mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/LICENSE create mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/dist/build.js create mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/dist/bundle.js create mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/dist/error.js create mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/dist/index.js create mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/dist/serialized.js create mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/dist/utility.js create mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/dist/validate.js create mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/package.json create mode 100644 node_modules/pacote/node_modules/@sigstore/core/LICENSE create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/asn1/error.js create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/asn1/index.js create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/asn1/length.js create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/asn1/obj.js create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/asn1/parse.js create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/asn1/tag.js create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/crypto.js create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/dsse.js create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/encoding.js create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/index.js create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/json.js create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/oid.js create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/pem.js create mode 100644 
node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/error.js create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/index.js create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/timestamp.js create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/stream.js create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/x509/cert.js create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/x509/ext.js create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/x509/index.js create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/x509/sct.js create mode 100644 node_modules/pacote/node_modules/@sigstore/core/package.json create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/LICENSE create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/base.js create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/bundle.js create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/dsse.js create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/index.js create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/message.js create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/error.js create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/external/error.js create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/external/fetch.js create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/external/fulcio.js create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/external/rekor.js create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/external/tsa.js create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/identity/ci.js create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/identity/index.js create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/identity/provider.js create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/index.js create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/index.js create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/signer/index.js create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/signer/signer.js create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/types/fetch.js create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/util/index.js create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/util/oidc.js create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/util/ua.js create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/witness/index.js create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/client.js create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/entry.js create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/index.js create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tsa/client.js create mode 100644 
node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tsa/index.js create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/witness/witness.js create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/package.json create mode 100644 node_modules/pacote/node_modules/@sigstore/tuf/LICENSE create mode 100644 node_modules/pacote/node_modules/@sigstore/tuf/dist/appdata.js create mode 100644 node_modules/pacote/node_modules/@sigstore/tuf/dist/client.js create mode 100644 node_modules/pacote/node_modules/@sigstore/tuf/dist/error.js create mode 100644 node_modules/pacote/node_modules/@sigstore/tuf/dist/index.js create mode 100644 node_modules/pacote/node_modules/@sigstore/tuf/dist/target.js create mode 100644 node_modules/pacote/node_modules/@sigstore/tuf/package.json create mode 100644 node_modules/pacote/node_modules/@sigstore/tuf/seeds.json create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/dsse.js create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/index.js create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/message.js create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/error.js create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/index.js create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/key/certificate.js create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/key/index.js create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/key/sct.js create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/policy.js create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/shared.types.js create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/index.js create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/merkle.js create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/set.js create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/tsa.js create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/dsse.js create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/index.js create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/intoto.js create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/trust/filter.js create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/trust/index.js create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/trust/trust.types.js create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/verifier.js create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/package.json create mode 100644 node_modules/pacote/node_modules/@tufjs/models/LICENSE create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/base.js create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/delegations.js create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/error.js create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/file.js create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/index.js create mode 100644 
node_modules/pacote/node_modules/@tufjs/models/dist/key.js create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/metadata.js create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/role.js create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/root.js create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/signature.js create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/snapshot.js create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/targets.js create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/timestamp.js create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/utils/guard.js create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/utils/index.js create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/utils/key.js create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/utils/oid.js create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/utils/types.js create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/utils/verify.js create mode 100644 node_modules/pacote/node_modules/@tufjs/models/package.json create mode 100644 node_modules/pacote/node_modules/sigstore/LICENSE create mode 100644 node_modules/pacote/node_modules/sigstore/dist/config.js create mode 100644 node_modules/pacote/node_modules/sigstore/dist/index.js create mode 100644 node_modules/pacote/node_modules/sigstore/dist/sigstore.js create mode 100644 node_modules/pacote/node_modules/sigstore/package.json create mode 100644 node_modules/pacote/node_modules/tuf-js/LICENSE create mode 100644 node_modules/pacote/node_modules/tuf-js/dist/config.js create mode 100644 node_modules/pacote/node_modules/tuf-js/dist/error.js create mode 100644 node_modules/pacote/node_modules/tuf-js/dist/fetcher.js create mode 100644 node_modules/pacote/node_modules/tuf-js/dist/index.js create mode 100644 node_modules/pacote/node_modules/tuf-js/dist/store.js create mode 100644 node_modules/pacote/node_modules/tuf-js/dist/updater.js create mode 100644 node_modules/pacote/node_modules/tuf-js/dist/utils/tmpfile.js create mode 100644 node_modules/pacote/node_modules/tuf-js/dist/utils/url.js create mode 100644 node_modules/pacote/node_modules/tuf-js/package.json diff --git a/node_modules/.gitignore b/node_modules/.gitignore index 85d675d1bd3b4..ca0c5fbd69ae3 100644 --- a/node_modules/.gitignore +++ b/node_modules/.gitignore @@ -202,6 +202,20 @@ !/p-map !/package-json-from-dist !/pacote +!/pacote/node_modules/ +/pacote/node_modules/* +!/pacote/node_modules/@sigstore/ +/pacote/node_modules/@sigstore/* +!/pacote/node_modules/@sigstore/bundle +!/pacote/node_modules/@sigstore/core +!/pacote/node_modules/@sigstore/sign +!/pacote/node_modules/@sigstore/tuf +!/pacote/node_modules/@sigstore/verify +!/pacote/node_modules/@tufjs/ +/pacote/node_modules/@tufjs/* +!/pacote/node_modules/@tufjs/models +!/pacote/node_modules/sigstore +!/pacote/node_modules/tuf-js !/parse-conflict-json !/path-key !/path-scurry diff --git a/node_modules/pacote/lib/dir.js b/node_modules/pacote/lib/dir.js index f3229b34e463a..4ae97c216fe64 100644 --- a/node_modules/pacote/lib/dir.js +++ b/node_modules/pacote/lib/dir.js @@ -39,6 +39,8 @@ class DirFetcher extends Fetcher { const stdio = this.opts.foregroundScripts ? 
'inherit' : 'pipe' return runScript({ + // this || undefined is because runScript will be unhappy with the default null value + scriptShell: this.opts.scriptShell || undefined, pkg: mani, event: 'prepare', path: this.resolved, diff --git a/node_modules/pacote/lib/fetcher.js b/node_modules/pacote/lib/fetcher.js index cc2c2db70c697..f2ac97619d3af 100644 --- a/node_modules/pacote/lib/fetcher.js +++ b/node_modules/pacote/lib/fetcher.js @@ -188,7 +188,15 @@ class FetcherBase { // private // Note: cacache will raise a EINTEGRITY error if the integrity doesn't match #tarballFromCache () { - return cacache.get.stream.byDigest(this.cache, this.integrity, this.opts) + const startTime = Date.now() + const stream = cacache.get.stream.byDigest(this.cache, this.integrity, this.opts) + const elapsedTime = Date.now() - startTime + // cache is good, so log it as a hit in particular since there was no fetch logged + log.http( + 'cache', + `${this.spec} ${elapsedTime}ms (cache hit)` + ) + return stream } get [_.cacheFetches] () { diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/LICENSE b/node_modules/pacote/node_modules/@sigstore/bundle/LICENSE new file mode 100644 index 0000000000000..e9e7c1679a09d --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/bundle/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 The Sigstore Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/dist/build.js b/node_modules/pacote/node_modules/@sigstore/bundle/dist/build.js new file mode 100644 index 0000000000000..ade736407554c --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/bundle/dist/build.js @@ -0,0 +1,100 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toMessageSignatureBundle = toMessageSignatureBundle; +exports.toDSSEBundle = toDSSEBundle; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +*/ +const protobuf_specs_1 = require("@sigstore/protobuf-specs"); +const bundle_1 = require("./bundle"); +// Message signature bundle - $case: 'messageSignature' +function toMessageSignatureBundle(options) { + return { + mediaType: options.certificateChain + ? bundle_1.BUNDLE_V02_MEDIA_TYPE + : bundle_1.BUNDLE_V03_MEDIA_TYPE, + content: { + $case: 'messageSignature', + messageSignature: { + messageDigest: { + algorithm: protobuf_specs_1.HashAlgorithm.SHA2_256, + digest: options.digest, + }, + signature: options.signature, + }, + }, + verificationMaterial: toVerificationMaterial(options), + }; +} +// DSSE envelope bundle - $case: 'dsseEnvelope' +function toDSSEBundle(options) { + return { + mediaType: options.certificateChain + ? bundle_1.BUNDLE_V02_MEDIA_TYPE + : bundle_1.BUNDLE_V03_MEDIA_TYPE, + content: { + $case: 'dsseEnvelope', + dsseEnvelope: toEnvelope(options), + }, + verificationMaterial: toVerificationMaterial(options), + }; +} +function toEnvelope(options) { + return { + payloadType: options.artifactType, + payload: options.artifact, + signatures: [toSignature(options)], + }; +} +function toSignature(options) { + return { + keyid: options.keyHint || '', + sig: options.signature, + }; +} +// Verification material +function toVerificationMaterial(options) { + return { + content: toKeyContent(options), + tlogEntries: [], + timestampVerificationData: { rfc3161Timestamps: [] }, + }; +} +function toKeyContent(options) { + if (options.certificate) { + if (options.certificateChain) { + return { + $case: 'x509CertificateChain', + x509CertificateChain: { + certificates: [{ rawBytes: options.certificate }], + }, + }; + } + else { + return { + $case: 'certificate', + certificate: { rawBytes: options.certificate }, + }; + } + } + else { + return { + $case: 'publicKey', + publicKey: { + hint: options.keyHint || '', + }, + }; + } +} diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/dist/bundle.js b/node_modules/pacote/node_modules/@sigstore/bundle/dist/bundle.js new file mode 100644 index 0000000000000..eb67a0ddc17bb --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/bundle/dist/bundle.js @@ -0,0 +1,24 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.BUNDLE_V03_MEDIA_TYPE = exports.BUNDLE_V03_LEGACY_MEDIA_TYPE = exports.BUNDLE_V02_MEDIA_TYPE = exports.BUNDLE_V01_MEDIA_TYPE = void 0; +exports.isBundleWithCertificateChain = isBundleWithCertificateChain; +exports.isBundleWithPublicKey = isBundleWithPublicKey; +exports.isBundleWithMessageSignature = isBundleWithMessageSignature; +exports.isBundleWithDsseEnvelope = isBundleWithDsseEnvelope; +exports.BUNDLE_V01_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.1'; +exports.BUNDLE_V02_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.2'; +exports.BUNDLE_V03_LEGACY_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.3'; +exports.BUNDLE_V03_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle.v0.3+json'; +// Type guards for bundle variants. 
+function isBundleWithCertificateChain(b) { + return b.verificationMaterial.content.$case === 'x509CertificateChain'; +} +function isBundleWithPublicKey(b) { + return b.verificationMaterial.content.$case === 'publicKey'; +} +function isBundleWithMessageSignature(b) { + return b.content.$case === 'messageSignature'; +} +function isBundleWithDsseEnvelope(b) { + return b.content.$case === 'dsseEnvelope'; +} diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/dist/error.js b/node_modules/pacote/node_modules/@sigstore/bundle/dist/error.js new file mode 100644 index 0000000000000..f84295323b812 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/bundle/dist/error.js @@ -0,0 +1,25 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ValidationError = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +class ValidationError extends Error { + constructor(message, fields) { + super(message); + this.fields = fields; + } +} +exports.ValidationError = ValidationError; diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/dist/index.js b/node_modules/pacote/node_modules/@sigstore/bundle/dist/index.js new file mode 100644 index 0000000000000..1b012acad4d85 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/bundle/dist/index.js @@ -0,0 +1,43 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isBundleV01 = exports.assertBundleV02 = exports.assertBundleV01 = exports.assertBundleLatest = exports.assertBundle = exports.envelopeToJSON = exports.envelopeFromJSON = exports.bundleToJSON = exports.bundleFromJSON = exports.ValidationError = exports.isBundleWithPublicKey = exports.isBundleWithMessageSignature = exports.isBundleWithDsseEnvelope = exports.isBundleWithCertificateChain = exports.BUNDLE_V03_MEDIA_TYPE = exports.BUNDLE_V03_LEGACY_MEDIA_TYPE = exports.BUNDLE_V02_MEDIA_TYPE = exports.BUNDLE_V01_MEDIA_TYPE = exports.toMessageSignatureBundle = exports.toDSSEBundle = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +var build_1 = require("./build"); +Object.defineProperty(exports, "toDSSEBundle", { enumerable: true, get: function () { return build_1.toDSSEBundle; } }); +Object.defineProperty(exports, "toMessageSignatureBundle", { enumerable: true, get: function () { return build_1.toMessageSignatureBundle; } }); +var bundle_1 = require("./bundle"); +Object.defineProperty(exports, "BUNDLE_V01_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V01_MEDIA_TYPE; } }); +Object.defineProperty(exports, "BUNDLE_V02_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V02_MEDIA_TYPE; } }); +Object.defineProperty(exports, "BUNDLE_V03_LEGACY_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V03_LEGACY_MEDIA_TYPE; } }); +Object.defineProperty(exports, "BUNDLE_V03_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V03_MEDIA_TYPE; } }); +Object.defineProperty(exports, "isBundleWithCertificateChain", { enumerable: true, get: function () { return bundle_1.isBundleWithCertificateChain; } }); +Object.defineProperty(exports, "isBundleWithDsseEnvelope", { enumerable: true, get: function () { return bundle_1.isBundleWithDsseEnvelope; } }); +Object.defineProperty(exports, "isBundleWithMessageSignature", { enumerable: true, get: function () { return bundle_1.isBundleWithMessageSignature; } }); +Object.defineProperty(exports, "isBundleWithPublicKey", { enumerable: true, get: function () { return bundle_1.isBundleWithPublicKey; } }); +var error_1 = require("./error"); +Object.defineProperty(exports, "ValidationError", { enumerable: true, get: function () { return error_1.ValidationError; } }); +var serialized_1 = require("./serialized"); +Object.defineProperty(exports, "bundleFromJSON", { enumerable: true, get: function () { return serialized_1.bundleFromJSON; } }); +Object.defineProperty(exports, "bundleToJSON", { enumerable: true, get: function () { return serialized_1.bundleToJSON; } }); +Object.defineProperty(exports, "envelopeFromJSON", { enumerable: true, get: function () { return serialized_1.envelopeFromJSON; } }); +Object.defineProperty(exports, "envelopeToJSON", { enumerable: true, get: function () { return serialized_1.envelopeToJSON; } }); +var validate_1 = require("./validate"); +Object.defineProperty(exports, "assertBundle", { enumerable: true, get: function () { return validate_1.assertBundle; } }); +Object.defineProperty(exports, "assertBundleLatest", { enumerable: true, get: function () { return validate_1.assertBundleLatest; } }); +Object.defineProperty(exports, "assertBundleV01", { enumerable: true, get: function () { return validate_1.assertBundleV01; } }); +Object.defineProperty(exports, "assertBundleV02", { enumerable: true, get: function () { return validate_1.assertBundleV02; } }); +Object.defineProperty(exports, "isBundleV01", { enumerable: true, get: function () { return validate_1.isBundleV01; } }); diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/dist/serialized.js b/node_modules/pacote/node_modules/@sigstore/bundle/dist/serialized.js new file mode 100644 index 0000000000000..be0d2a2d54d09 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/bundle/dist/serialized.js @@ -0,0 +1,49 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.envelopeToJSON = exports.envelopeFromJSON = exports.bundleToJSON = exports.bundleFromJSON = void 0; +/* +Copyright 2023 The Sigstore Authors. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const protobuf_specs_1 = require("@sigstore/protobuf-specs"); +const bundle_1 = require("./bundle"); +const validate_1 = require("./validate"); +const bundleFromJSON = (obj) => { + const bundle = protobuf_specs_1.Bundle.fromJSON(obj); + switch (bundle.mediaType) { + case bundle_1.BUNDLE_V01_MEDIA_TYPE: + (0, validate_1.assertBundleV01)(bundle); + break; + case bundle_1.BUNDLE_V02_MEDIA_TYPE: + (0, validate_1.assertBundleV02)(bundle); + break; + default: + (0, validate_1.assertBundleLatest)(bundle); + break; + } + return bundle; +}; +exports.bundleFromJSON = bundleFromJSON; +const bundleToJSON = (bundle) => { + return protobuf_specs_1.Bundle.toJSON(bundle); +}; +exports.bundleToJSON = bundleToJSON; +const envelopeFromJSON = (obj) => { + return protobuf_specs_1.Envelope.fromJSON(obj); +}; +exports.envelopeFromJSON = envelopeFromJSON; +const envelopeToJSON = (envelope) => { + return protobuf_specs_1.Envelope.toJSON(envelope); +}; +exports.envelopeToJSON = envelopeToJSON; diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/dist/utility.js b/node_modules/pacote/node_modules/@sigstore/bundle/dist/utility.js new file mode 100644 index 0000000000000..c8ad2e549bdc6 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/bundle/dist/utility.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/dist/validate.js b/node_modules/pacote/node_modules/@sigstore/bundle/dist/validate.js new file mode 100644 index 0000000000000..21b8b5ee293ba --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/bundle/dist/validate.js @@ -0,0 +1,199 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.assertBundle = assertBundle; +exports.assertBundleV01 = assertBundleV01; +exports.isBundleV01 = isBundleV01; +exports.assertBundleV02 = assertBundleV02; +exports.assertBundleLatest = assertBundleLatest; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const error_1 = require("./error"); +// Performs basic validation of a Sigstore bundle to ensure that all required +// fields are populated. This is not a complete validation of the bundle, but +// rather a check that the bundle is in a valid state to be processed by the +// rest of the code. 
+function assertBundle(b) { + const invalidValues = validateBundleBase(b); + if (invalidValues.length > 0) { + throw new error_1.ValidationError('invalid bundle', invalidValues); + } +} +// Asserts that the given bundle conforms to the v0.1 bundle format. +function assertBundleV01(b) { + const invalidValues = []; + invalidValues.push(...validateBundleBase(b)); + invalidValues.push(...validateInclusionPromise(b)); + if (invalidValues.length > 0) { + throw new error_1.ValidationError('invalid v0.1 bundle', invalidValues); + } +} +// Type guard to determine if Bundle is a v0.1 bundle. +function isBundleV01(b) { + try { + assertBundleV01(b); + return true; + } + catch (e) { + return false; + } +} +// Asserts that the given bundle conforms to the v0.2 bundle format. +function assertBundleV02(b) { + const invalidValues = []; + invalidValues.push(...validateBundleBase(b)); + invalidValues.push(...validateInclusionProof(b)); + if (invalidValues.length > 0) { + throw new error_1.ValidationError('invalid v0.2 bundle', invalidValues); + } +} +// Asserts that the given bundle conforms to the newest (0.3) bundle format. +function assertBundleLatest(b) { + const invalidValues = []; + invalidValues.push(...validateBundleBase(b)); + invalidValues.push(...validateInclusionProof(b)); + invalidValues.push(...validateNoCertificateChain(b)); + if (invalidValues.length > 0) { + throw new error_1.ValidationError('invalid bundle', invalidValues); + } +} +function validateBundleBase(b) { + const invalidValues = []; + // Media type validation + if (b.mediaType === undefined || + (!b.mediaType.match(/^application\/vnd\.dev\.sigstore\.bundle\+json;version=\d\.\d/) && + !b.mediaType.match(/^application\/vnd\.dev\.sigstore\.bundle\.v\d\.\d\+json/))) { + invalidValues.push('mediaType'); + } + // Content-related validation + if (b.content === undefined) { + invalidValues.push('content'); + } + else { + switch (b.content.$case) { + case 'messageSignature': + if (b.content.messageSignature.messageDigest === undefined) { + invalidValues.push('content.messageSignature.messageDigest'); + } + else { + if (b.content.messageSignature.messageDigest.digest.length === 0) { + invalidValues.push('content.messageSignature.messageDigest.digest'); + } + } + if (b.content.messageSignature.signature.length === 0) { + invalidValues.push('content.messageSignature.signature'); + } + break; + case 'dsseEnvelope': + if (b.content.dsseEnvelope.payload.length === 0) { + invalidValues.push('content.dsseEnvelope.payload'); + } + if (b.content.dsseEnvelope.signatures.length !== 1) { + invalidValues.push('content.dsseEnvelope.signatures'); + } + else { + if (b.content.dsseEnvelope.signatures[0].sig.length === 0) { + invalidValues.push('content.dsseEnvelope.signatures[0].sig'); + } + } + break; + } + } + // Verification material-related validation + if (b.verificationMaterial === undefined) { + invalidValues.push('verificationMaterial'); + } + else { + if (b.verificationMaterial.content === undefined) { + invalidValues.push('verificationMaterial.content'); + } + else { + switch (b.verificationMaterial.content.$case) { + case 'x509CertificateChain': + if (b.verificationMaterial.content.x509CertificateChain.certificates + .length === 0) { + invalidValues.push('verificationMaterial.content.x509CertificateChain.certificates'); + } + b.verificationMaterial.content.x509CertificateChain.certificates.forEach((cert, i) => { + if (cert.rawBytes.length === 0) { + 
invalidValues.push(`verificationMaterial.content.x509CertificateChain.certificates[${i}].rawBytes`); + } + }); + break; + case 'certificate': + if (b.verificationMaterial.content.certificate.rawBytes.length === 0) { + invalidValues.push('verificationMaterial.content.certificate.rawBytes'); + } + break; + } + } + if (b.verificationMaterial.tlogEntries === undefined) { + invalidValues.push('verificationMaterial.tlogEntries'); + } + else { + if (b.verificationMaterial.tlogEntries.length > 0) { + b.verificationMaterial.tlogEntries.forEach((entry, i) => { + if (entry.logId === undefined) { + invalidValues.push(`verificationMaterial.tlogEntries[${i}].logId`); + } + if (entry.kindVersion === undefined) { + invalidValues.push(`verificationMaterial.tlogEntries[${i}].kindVersion`); + } + }); + } + } + } + return invalidValues; +} +// Necessary for V01 bundles +function validateInclusionPromise(b) { + const invalidValues = []; + if (b.verificationMaterial && + b.verificationMaterial.tlogEntries?.length > 0) { + b.verificationMaterial.tlogEntries.forEach((entry, i) => { + if (entry.inclusionPromise === undefined) { + invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionPromise`); + } + }); + } + return invalidValues; +} +// Necessary for V02 and later bundles +function validateInclusionProof(b) { + const invalidValues = []; + if (b.verificationMaterial && + b.verificationMaterial.tlogEntries?.length > 0) { + b.verificationMaterial.tlogEntries.forEach((entry, i) => { + if (entry.inclusionProof === undefined) { + invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionProof`); + } + else { + if (entry.inclusionProof.checkpoint === undefined) { + invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionProof.checkpoint`); + } + } + }); + } + return invalidValues; +} +// Necessary for V03 and later bundles +function validateNoCertificateChain(b) { + const invalidValues = []; + /* istanbul ignore next */ + if (b.verificationMaterial?.content?.$case === 'x509CertificateChain') { + invalidValues.push('verificationMaterial.content.$case'); + } + return invalidValues; +} diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/package.json b/node_modules/pacote/node_modules/@sigstore/bundle/package.json new file mode 100644 index 0000000000000..ee5d2b92b801a --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/bundle/package.json @@ -0,0 +1,35 @@ +{ + "name": "@sigstore/bundle", + "version": "3.0.0", + "description": "Sigstore bundle type", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "clean": "shx rm -rf dist *.tsbuildinfo", + "build": "tsc --build", + "test": "jest" + }, + "files": [ + "dist", + "store" + ], + "author": "bdehamer@github.com", + "license": "Apache-2.0", + "repository": { + "type": "git", + "url": "git+https://github.com/sigstore/sigstore-js.git" + }, + "bugs": { + "url": "https://github.com/sigstore/sigstore-js/issues" + }, + "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/bundle#readme", + "publishConfig": { + "provenance": true + }, + "dependencies": { + "@sigstore/protobuf-specs": "^0.3.2" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } +} diff --git a/node_modules/pacote/node_modules/@sigstore/core/LICENSE b/node_modules/pacote/node_modules/@sigstore/core/LICENSE new file mode 100644 index 0000000000000..e9e7c1679a09d --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/core/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + 
http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 The Sigstore Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/error.js b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/error.js new file mode 100644 index 0000000000000..17d93b0f7e706 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/error.js @@ -0,0 +1,24 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ASN1TypeError = exports.ASN1ParseError = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +class ASN1ParseError extends Error { +} +exports.ASN1ParseError = ASN1ParseError; +class ASN1TypeError extends Error { +} +exports.ASN1TypeError = ASN1TypeError; diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/index.js b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/index.js new file mode 100644 index 0000000000000..348b2ea4022e5 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/index.js @@ -0,0 +1,20 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ASN1Obj = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +var obj_1 = require("./obj"); +Object.defineProperty(exports, "ASN1Obj", { enumerable: true, get: function () { return obj_1.ASN1Obj; } }); diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/length.js b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/length.js new file mode 100644 index 0000000000000..cb7ebf09dbefa --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/length.js @@ -0,0 +1,62 @@ +"use strict"; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +*/ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.decodeLength = decodeLength; +exports.encodeLength = encodeLength; +const error_1 = require("./error"); +// Decodes the length of a DER-encoded ANS.1 element from the supplied stream. +// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-encoded-length-and-value-bytes +function decodeLength(stream) { + const buf = stream.getUint8(); + // If the most significant bit is UNSET the length is just the value of the + // byte. + if ((buf & 0x80) === 0x00) { + return buf; + } + // Otherwise, the lower 7 bits of the first byte indicate the number of bytes + // that follow to encode the length. + const byteCount = buf & 0x7f; + // Ensure the encoded length can safely fit in a JS number. + if (byteCount > 6) { + throw new error_1.ASN1ParseError('length exceeds 6 byte limit'); + } + // Iterate over the bytes that encode the length. + let len = 0; + for (let i = 0; i < byteCount; i++) { + len = len * 256 + stream.getUint8(); + } + // This is a valid ASN.1 length encoding, but we don't support it. + if (len === 0) { + throw new error_1.ASN1ParseError('indefinite length encoding not supported'); + } + return len; +} +// Translates the supplied value to a DER-encoded length. +function encodeLength(len) { + if (len < 128) { + return Buffer.from([len]); + } + // Bitwise operations on large numbers are not supported in JS, so we need to + // use BigInts. + let val = BigInt(len); + const bytes = []; + while (val > 0n) { + bytes.unshift(Number(val & 255n)); + val = val >> 8n; + } + return Buffer.from([0x80 | bytes.length, ...bytes]); +} diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/obj.js b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/obj.js new file mode 100644 index 0000000000000..5f9ac9cdbc493 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/obj.js @@ -0,0 +1,152 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ASN1Obj = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const stream_1 = require("../stream"); +const error_1 = require("./error"); +const length_1 = require("./length"); +const parse_1 = require("./parse"); +const tag_1 = require("./tag"); +class ASN1Obj { + constructor(tag, value, subs) { + this.tag = tag; + this.value = value; + this.subs = subs; + } + // Constructs an ASN.1 object from a Buffer of DER-encoded bytes. 
+ static parseBuffer(buf) { + return parseStream(new stream_1.ByteStream(buf)); + } + toDER() { + const valueStream = new stream_1.ByteStream(); + if (this.subs.length > 0) { + for (const sub of this.subs) { + valueStream.appendView(sub.toDER()); + } + } + else { + valueStream.appendView(this.value); + } + const value = valueStream.buffer; + // Concat tag/length/value + const obj = new stream_1.ByteStream(); + obj.appendChar(this.tag.toDER()); + obj.appendView((0, length_1.encodeLength)(value.length)); + obj.appendView(value); + return obj.buffer; + } + ///////////////////////////////////////////////////////////////////////////// + // Convenience methods for parsing ASN.1 primitives into JS types + // Returns the ASN.1 object's value as a boolean. Throws an error if the + // object is not a boolean. + toBoolean() { + if (!this.tag.isBoolean()) { + throw new error_1.ASN1TypeError('not a boolean'); + } + return (0, parse_1.parseBoolean)(this.value); + } + // Returns the ASN.1 object's value as a BigInt. Throws an error if the + // object is not an integer. + toInteger() { + if (!this.tag.isInteger()) { + throw new error_1.ASN1TypeError('not an integer'); + } + return (0, parse_1.parseInteger)(this.value); + } + // Returns the ASN.1 object's value as an OID string. Throws an error if the + // object is not an OID. + toOID() { + if (!this.tag.isOID()) { + throw new error_1.ASN1TypeError('not an OID'); + } + return (0, parse_1.parseOID)(this.value); + } + // Returns the ASN.1 object's value as a Date. Throws an error if the object + // is not either a UTCTime or a GeneralizedTime. + toDate() { + switch (true) { + case this.tag.isUTCTime(): + return (0, parse_1.parseTime)(this.value, true); + case this.tag.isGeneralizedTime(): + return (0, parse_1.parseTime)(this.value, false); + default: + throw new error_1.ASN1TypeError('not a date'); + } + } + // Returns the ASN.1 object's value as a number[] where each number is the + // value of a bit in the bit string. Throws an error if the object is not a + // bit string. + toBitString() { + if (!this.tag.isBitString()) { + throw new error_1.ASN1TypeError('not a bit string'); + } + return (0, parse_1.parseBitString)(this.value); + } +} +exports.ASN1Obj = ASN1Obj; +///////////////////////////////////////////////////////////////////////////// +// Internal stream parsing functions +function parseStream(stream) { + // Parse tag, length, and value from stream + const tag = new tag_1.ASN1Tag(stream.getUint8()); + const len = (0, length_1.decodeLength)(stream); + const value = stream.slice(stream.position, len); + const start = stream.position; + let subs = []; + // If the object is constructed, parse its children. Sometimes, children + // are embedded in OCTESTRING objects, so we need to check those + // for children as well. + if (tag.constructed) { + subs = collectSubs(stream, len); + } + else if (tag.isOctetString()) { + // Attempt to parse children of OCTETSTRING objects. If anything fails, + // assume the object is not constructed and treat as primitive. + try { + subs = collectSubs(stream, len); + } + catch (e) { + // Fail silently and treat as primitive + } + } + // If there are no children, move stream cursor to the end of the object + if (subs.length === 0) { + stream.seek(start + len); + } + return new ASN1Obj(tag, value, subs); +} +function collectSubs(stream, len) { + // Calculate end of object content + const end = stream.position + len; + // Make sure there are enough bytes left in the stream. 
This should never + // happen, cause it'll get caught when the stream is sliced in parseStream. + // Leaving as an extra check just in case. + /* istanbul ignore if */ + if (end > stream.length) { + throw new error_1.ASN1ParseError('invalid length'); + } + // Parse all children + const subs = []; + while (stream.position < end) { + subs.push(parseStream(stream)); + } + // When we're done parsing children, we should be at the end of the object + if (stream.position !== end) { + throw new error_1.ASN1ParseError('invalid length'); + } + return subs; +} diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/parse.js b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/parse.js new file mode 100644 index 0000000000000..7fbb42632c60e --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/parse.js @@ -0,0 +1,124 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseInteger = parseInteger; +exports.parseStringASCII = parseStringASCII; +exports.parseTime = parseTime; +exports.parseOID = parseOID; +exports.parseBoolean = parseBoolean; +exports.parseBitString = parseBitString; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const RE_TIME_SHORT_YEAR = /^(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\.\d{3})?Z$/; +const RE_TIME_LONG_YEAR = /^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\.\d{3})?Z$/; +// Parse a BigInt from the DER-encoded buffer +// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-integer +function parseInteger(buf) { + let pos = 0; + const end = buf.length; + let val = buf[pos]; + const neg = val > 0x7f; + // Consume any padding bytes + const pad = neg ? 0xff : 0x00; + while (val == pad && ++pos < end) { + val = buf[pos]; + } + // Calculate remaining bytes to read + const len = end - pos; + if (len === 0) + return BigInt(neg ? -1 : 0); + // Handle two's complement for negative numbers + val = neg ? val - 256 : val; + // Parse remaining bytes + let n = BigInt(val); + for (let i = pos + 1; i < end; ++i) { + n = n * BigInt(256) + BigInt(buf[i]); + } + return n; +} +// Parse an ASCII string from the DER-encoded buffer +// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-basic-types#boolean +function parseStringASCII(buf) { + return buf.toString('ascii'); +} +// Parse a Date from the DER-encoded buffer +// https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.5.1 +function parseTime(buf, shortYear) { + const timeStr = parseStringASCII(buf); + // Parse the time string into matches - captured groups start at index 1 + const m = shortYear + ? RE_TIME_SHORT_YEAR.exec(timeStr) + : RE_TIME_LONG_YEAR.exec(timeStr); + if (!m) { + throw new Error('invalid time'); + } + // Translate dates with a 2-digit year to 4 digits per the spec + if (shortYear) { + let year = Number(m[1]); + year += year >= 50 ? 
1900 : 2000; + m[1] = year.toString(); + } + // Translate to ISO8601 format and parse + return new Date(`${m[1]}-${m[2]}-${m[3]}T${m[4]}:${m[5]}:${m[6]}Z`); +} +// Parse an OID from the DER-encoded buffer +// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-object-identifier +function parseOID(buf) { + let pos = 0; + const end = buf.length; + // Consume first byte which encodes the first two OID components + let n = buf[pos++]; + const first = Math.floor(n / 40); + const second = n % 40; + let oid = `${first}.${second}`; + // Consume remaining bytes + let val = 0; + for (; pos < end; ++pos) { + n = buf[pos]; + val = (val << 7) + (n & 0x7f); + // If the left-most bit is NOT set, then this is the last byte in the + // sequence and we can add the value to the OID and reset the accumulator + if ((n & 0x80) === 0) { + oid += `.${val}`; + val = 0; + } + } + return oid; +} +// Parse a boolean from the DER-encoded buffer +// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-basic-types#boolean +function parseBoolean(buf) { + return buf[0] !== 0; +} +// Parse a bit string from the DER-encoded buffer +// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-bit-string +function parseBitString(buf) { + // First byte tell us how many unused bits are in the last byte + const unused = buf[0]; + const start = 1; + const end = buf.length; + const bits = []; + for (let i = start; i < end; ++i) { + const byte = buf[i]; + // The skip value is only used for the last byte + const skip = i === end - 1 ? unused : 0; + // Iterate over each bit in the byte (most significant first) + for (let j = 7; j >= skip; --j) { + // Read the bit and add it to the bit string + bits.push((byte >> j) & 0x01); + } + } + return bits; +} diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/tag.js b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/tag.js new file mode 100644 index 0000000000000..84dd938d049aa --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/tag.js @@ -0,0 +1,86 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ASN1Tag = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const error_1 = require("./error"); +const UNIVERSAL_TAG = { + BOOLEAN: 0x01, + INTEGER: 0x02, + BIT_STRING: 0x03, + OCTET_STRING: 0x04, + OBJECT_IDENTIFIER: 0x06, + SEQUENCE: 0x10, + SET: 0x11, + PRINTABLE_STRING: 0x13, + UTC_TIME: 0x17, + GENERALIZED_TIME: 0x18, +}; +const TAG_CLASS = { + UNIVERSAL: 0x00, + APPLICATION: 0x01, + CONTEXT_SPECIFIC: 0x02, + PRIVATE: 0x03, +}; +// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-encoded-tag-bytes +class ASN1Tag { + constructor(enc) { + // Bits 0 through 4 are the tag number + this.number = enc & 0x1f; + // Bit 5 is the constructed bit + this.constructed = (enc & 0x20) === 0x20; + // Bit 6 & 7 are the class + this.class = enc >> 6; + if (this.number === 0x1f) { + throw new error_1.ASN1ParseError('long form tags not supported'); + } + if (this.class === TAG_CLASS.UNIVERSAL && this.number === 0x00) { + throw new error_1.ASN1ParseError('unsupported tag 0x00'); + } + } + isUniversal() { + return this.class === TAG_CLASS.UNIVERSAL; + } + isContextSpecific(num) { + const res = this.class === TAG_CLASS.CONTEXT_SPECIFIC; + return num !== undefined ? res && this.number === num : res; + } + isBoolean() { + return this.isUniversal() && this.number === UNIVERSAL_TAG.BOOLEAN; + } + isInteger() { + return this.isUniversal() && this.number === UNIVERSAL_TAG.INTEGER; + } + isBitString() { + return this.isUniversal() && this.number === UNIVERSAL_TAG.BIT_STRING; + } + isOctetString() { + return this.isUniversal() && this.number === UNIVERSAL_TAG.OCTET_STRING; + } + isOID() { + return (this.isUniversal() && this.number === UNIVERSAL_TAG.OBJECT_IDENTIFIER); + } + isUTCTime() { + return this.isUniversal() && this.number === UNIVERSAL_TAG.UTC_TIME; + } + isGeneralizedTime() { + return this.isUniversal() && this.number === UNIVERSAL_TAG.GENERALIZED_TIME; + } + toDER() { + return this.number | (this.constructed ? 0x20 : 0x00) | (this.class << 6); + } +} +exports.ASN1Tag = ASN1Tag; diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/crypto.js b/node_modules/pacote/node_modules/@sigstore/core/dist/crypto.js new file mode 100644 index 0000000000000..296b5ba43e86a --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/core/dist/crypto.js @@ -0,0 +1,60 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createPublicKey = createPublicKey; +exports.digest = digest; +exports.verify = verify; +exports.bufferEqual = bufferEqual; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const crypto_1 = __importDefault(require("crypto")); +function createPublicKey(key, type = 'spki') { + if (typeof key === 'string') { + return crypto_1.default.createPublicKey(key); + } + else { + return crypto_1.default.createPublicKey({ key, format: 'der', type: type }); + } +} +function digest(algorithm, ...data) { + const hash = crypto_1.default.createHash(algorithm); + for (const d of data) { + hash.update(d); + } + return hash.digest(); +} +function verify(data, key, signature, algorithm) { + // The try/catch is to work around an issue in Node 14.x where verify throws + // an error in some scenarios if the signature is invalid. + try { + return crypto_1.default.verify(algorithm, data, key, signature); + } + catch (e) { + /* istanbul ignore next */ + return false; + } +} +function bufferEqual(a, b) { + try { + return crypto_1.default.timingSafeEqual(a, b); + } + catch { + /* istanbul ignore next */ + return false; + } +} diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/dsse.js b/node_modules/pacote/node_modules/@sigstore/core/dist/dsse.js new file mode 100644 index 0000000000000..ca7b63630e2ba --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/core/dist/dsse.js @@ -0,0 +1,30 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.preAuthEncoding = preAuthEncoding; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const PAE_PREFIX = 'DSSEv1'; +// DSSE Pre-Authentication Encoding +function preAuthEncoding(payloadType, payload) { + const prefix = [ + PAE_PREFIX, + payloadType.length, + payloadType, + payload.length, + '', + ].join(' '); + return Buffer.concat([Buffer.from(prefix, 'ascii'), payload]); +} diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/encoding.js b/node_modules/pacote/node_modules/@sigstore/core/dist/encoding.js new file mode 100644 index 0000000000000..7113af66db4c2 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/core/dist/encoding.js @@ -0,0 +1,27 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.base64Encode = base64Encode; +exports.base64Decode = base64Decode; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const BASE64_ENCODING = 'base64'; +const UTF8_ENCODING = 'utf-8'; +function base64Encode(str) { + return Buffer.from(str, UTF8_ENCODING).toString(BASE64_ENCODING); +} +function base64Decode(str) { + return Buffer.from(str, BASE64_ENCODING).toString(UTF8_ENCODING); +} diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/index.js b/node_modules/pacote/node_modules/@sigstore/core/dist/index.js new file mode 100644 index 0000000000000..ac35e86a8df7d --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/core/dist/index.js @@ -0,0 +1,56 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.X509SCTExtension = exports.X509Certificate = exports.EXTENSION_OID_SCT = exports.ByteStream = exports.RFC3161Timestamp = exports.pem = exports.json = exports.encoding = exports.dsse = exports.crypto = exports.ASN1Obj = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +var asn1_1 = require("./asn1"); +Object.defineProperty(exports, "ASN1Obj", { enumerable: true, get: function () { return asn1_1.ASN1Obj; } }); +exports.crypto = __importStar(require("./crypto")); +exports.dsse = __importStar(require("./dsse")); +exports.encoding = __importStar(require("./encoding")); +exports.json = __importStar(require("./json")); +exports.pem = __importStar(require("./pem")); +var rfc3161_1 = require("./rfc3161"); +Object.defineProperty(exports, "RFC3161Timestamp", { enumerable: true, get: function () { return rfc3161_1.RFC3161Timestamp; } }); +var stream_1 = require("./stream"); +Object.defineProperty(exports, "ByteStream", { enumerable: true, get: function () { return stream_1.ByteStream; } }); +var x509_1 = require("./x509"); +Object.defineProperty(exports, "EXTENSION_OID_SCT", { enumerable: true, get: function () { return x509_1.EXTENSION_OID_SCT; } }); +Object.defineProperty(exports, "X509Certificate", { enumerable: true, get: function () { return x509_1.X509Certificate; } }); +Object.defineProperty(exports, "X509SCTExtension", { enumerable: true, get: function () { return x509_1.X509SCTExtension; } }); diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/json.js b/node_modules/pacote/node_modules/@sigstore/core/dist/json.js new file mode 100644 index 0000000000000..7808d033b98cc --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/core/dist/json.js @@ -0,0 +1,60 @@ +"use strict"; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.canonicalize = canonicalize; +// JSON canonicalization per https://github.com/cyberphone/json-canonicalization +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function canonicalize(object) { + let buffer = ''; + if (object === null || typeof object !== 'object' || object.toJSON != null) { + // Primitives or toJSONable objects + buffer += JSON.stringify(object); + } + else if (Array.isArray(object)) { + // Array - maintain element order + buffer += '['; + let first = true; + object.forEach((element) => { + if (!first) { + buffer += ','; + } + first = false; + // recursive call + buffer += canonicalize(element); + }); + buffer += ']'; + } + else { + // Object - Sort properties before serializing + buffer += '{'; + let first = true; + Object.keys(object) + .sort() + .forEach((property) => { + if (!first) { + buffer += ','; + } + first = false; + buffer += JSON.stringify(property); + buffer += ':'; + // recursive call + buffer += canonicalize(object[property]); + }); + buffer += '}'; + } + return buffer; +} diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/oid.js b/node_modules/pacote/node_modules/@sigstore/core/dist/oid.js new file mode 100644 index 0000000000000..ac7a643067ad0 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/core/dist/oid.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SHA2_HASH_ALGOS = exports.ECDSA_SIGNATURE_ALGOS = void 0; +exports.ECDSA_SIGNATURE_ALGOS = { + '1.2.840.10045.4.3.1': 'sha224', + '1.2.840.10045.4.3.2': 'sha256', + '1.2.840.10045.4.3.3': 'sha384', + '1.2.840.10045.4.3.4': 'sha512', +}; +exports.SHA2_HASH_ALGOS = { + '2.16.840.1.101.3.4.2.1': 'sha256', + '2.16.840.1.101.3.4.2.2': 'sha384', + '2.16.840.1.101.3.4.2.3': 'sha512', +}; diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/pem.js b/node_modules/pacote/node_modules/@sigstore/core/dist/pem.js new file mode 100644 index 0000000000000..f1241d28d586e --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/core/dist/pem.js @@ -0,0 +1,43 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toDER = toDER; +exports.fromDER = fromDER; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const PEM_HEADER = /-----BEGIN (.*)-----/; +const PEM_FOOTER = /-----END (.*)-----/; +function toDER(certificate) { + let der = ''; + certificate.split('\n').forEach((line) => { + if (line.match(PEM_HEADER) || line.match(PEM_FOOTER)) { + return; + } + der += line; + }); + return Buffer.from(der, 'base64'); +} +// Translates a DER-encoded buffer into a PEM-encoded string. Standard PEM +// encoding dictates that each certificate should have a trailing newline after +// the footer. +function fromDER(certificate, type = 'CERTIFICATE') { + // Base64-encode the certificate. + const der = certificate.toString('base64'); + // Split the certificate into lines of 64 characters. 
+ const lines = der.match(/.{1,64}/g) || ''; + return [`-----BEGIN ${type}-----`, ...lines, `-----END ${type}-----`] + .join('\n') + .concat('\n'); +} diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/error.js b/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/error.js new file mode 100644 index 0000000000000..b9b549b0bb323 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/error.js @@ -0,0 +1,21 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.RFC3161TimestampVerificationError = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +class RFC3161TimestampVerificationError extends Error { +} +exports.RFC3161TimestampVerificationError = RFC3161TimestampVerificationError; diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/index.js b/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/index.js new file mode 100644 index 0000000000000..b77ecf1c7d50c --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/index.js @@ -0,0 +1,20 @@ +"use strict"; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.RFC3161Timestamp = void 0; +var timestamp_1 = require("./timestamp"); +Object.defineProperty(exports, "RFC3161Timestamp", { enumerable: true, get: function () { return timestamp_1.RFC3161Timestamp; } }); diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/timestamp.js b/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/timestamp.js new file mode 100644 index 0000000000000..3e61fc1a4e169 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/timestamp.js @@ -0,0 +1,201 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.RFC3161Timestamp = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const asn1_1 = require("../asn1"); +const crypto = __importStar(require("../crypto")); +const oid_1 = require("../oid"); +const error_1 = require("./error"); +const tstinfo_1 = require("./tstinfo"); +const OID_PKCS9_CONTENT_TYPE_SIGNED_DATA = '1.2.840.113549.1.7.2'; +const OID_PKCS9_CONTENT_TYPE_TSTINFO = '1.2.840.113549.1.9.16.1.4'; +const OID_PKCS9_MESSAGE_DIGEST_KEY = '1.2.840.113549.1.9.4'; +class RFC3161Timestamp { + constructor(asn1) { + this.root = asn1; + } + static parse(der) { + const asn1 = asn1_1.ASN1Obj.parseBuffer(der); + return new RFC3161Timestamp(asn1); + } + get status() { + return this.pkiStatusInfoObj.subs[0].toInteger(); + } + get contentType() { + return this.contentTypeObj.toOID(); + } + get eContentType() { + return this.eContentTypeObj.toOID(); + } + get signingTime() { + return this.tstInfo.genTime; + } + get signerIssuer() { + return this.signerSidObj.subs[0].value; + } + get signerSerialNumber() { + return this.signerSidObj.subs[1].value; + } + get signerDigestAlgorithm() { + const oid = this.signerDigestAlgorithmObj.subs[0].toOID(); + return oid_1.SHA2_HASH_ALGOS[oid]; + } + get signatureAlgorithm() { + const oid = this.signatureAlgorithmObj.subs[0].toOID(); + return oid_1.ECDSA_SIGNATURE_ALGOS[oid]; + } + get signatureValue() { + return this.signatureValueObj.value; + } + get tstInfo() { + // Need to unpack tstInfo from an OCTET STRING + return new tstinfo_1.TSTInfo(this.eContentObj.subs[0].subs[0]); + } + verify(data, publicKey) { + if (!this.timeStampTokenObj) { + throw new error_1.RFC3161TimestampVerificationError('timeStampToken is missing'); + } + // Check for expected ContentInfo content type + if (this.contentType !== OID_PKCS9_CONTENT_TYPE_SIGNED_DATA) { + throw new error_1.RFC3161TimestampVerificationError(`incorrect content type: ${this.contentType}`); + } + // Check for expected encapsulated content type + if (this.eContentType !== OID_PKCS9_CONTENT_TYPE_TSTINFO) { + throw new error_1.RFC3161TimestampVerificationError(`incorrect encapsulated content type: ${this.eContentType}`); + } + // Check that the tstInfo references the correct artifact + this.tstInfo.verify(data); + // Check that the signed message digest matches the tstInfo + this.verifyMessageDigest(); + // Check that the signature is valid for the signed attributes + this.verifySignature(publicKey); + } + verifyMessageDigest() { + // Check that the tstInfo matches the 
signed data + const tstInfoDigest = crypto.digest(this.signerDigestAlgorithm, this.tstInfo.raw); + const expectedDigest = this.messageDigestAttributeObj.subs[1].subs[0].value; + if (!crypto.bufferEqual(tstInfoDigest, expectedDigest)) { + throw new error_1.RFC3161TimestampVerificationError('signed data does not match tstInfo'); + } + } + verifySignature(key) { + // Encode the signed attributes for verification + const signedAttrs = this.signedAttrsObj.toDER(); + signedAttrs[0] = 0x31; // Change context-specific tag to SET + // Check that the signature is valid for the signed attributes + const verified = crypto.verify(signedAttrs, key, this.signatureValue, this.signatureAlgorithm); + if (!verified) { + throw new error_1.RFC3161TimestampVerificationError('signature verification failed'); + } + } + // https://www.rfc-editor.org/rfc/rfc3161#section-2.4.2 + get pkiStatusInfoObj() { + // pkiStatusInfo is the first element of the timestamp response sequence + return this.root.subs[0]; + } + // https://www.rfc-editor.org/rfc/rfc3161#section-2.4.2 + get timeStampTokenObj() { + // timeStampToken is the first element of the timestamp response sequence + return this.root.subs[1]; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-3 + get contentTypeObj() { + return this.timeStampTokenObj.subs[0]; + } + // https://www.rfc-editor.org/rfc/rfc5652#section-3 + get signedDataObj() { + const obj = this.timeStampTokenObj.subs.find((sub) => sub.tag.isContextSpecific(0x00)); + return obj.subs[0]; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-5.1 + get encapContentInfoObj() { + return this.signedDataObj.subs[2]; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-5.1 + get signerInfosObj() { + // SignerInfos is the last element of the signed data sequence + const sd = this.signedDataObj; + return sd.subs[sd.subs.length - 1]; + } + // https://www.rfc-editor.org/rfc/rfc5652#section-5.1 + get signerInfoObj() { + // Only supporting one signer + return this.signerInfosObj.subs[0]; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-5.2 + get eContentTypeObj() { + return this.encapContentInfoObj.subs[0]; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-5.2 + get eContentObj() { + return this.encapContentInfoObj.subs[1]; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3 + get signedAttrsObj() { + const signedAttrs = this.signerInfoObj.subs.find((sub) => sub.tag.isContextSpecific(0x00)); + return signedAttrs; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3 + get messageDigestAttributeObj() { + const messageDigest = this.signedAttrsObj.subs.find((sub) => sub.subs[0].tag.isOID() && + sub.subs[0].toOID() === OID_PKCS9_MESSAGE_DIGEST_KEY); + return messageDigest; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3 + get signerSidObj() { + return this.signerInfoObj.subs[1]; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3 + get signerDigestAlgorithmObj() { + // Signature is the 2nd element of the signerInfoObj object + return this.signerInfoObj.subs[2]; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3 + get signatureAlgorithmObj() { + // Signature is the 4th element of the signerInfoObj object + return this.signerInfoObj.subs[4]; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3 + get signatureValueObj() { + // Signature is the 6th element of the signerInfoObj object + return this.signerInfoObj.subs[5]; + } +} +exports.RFC3161Timestamp = RFC3161Timestamp; 
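A hedged sketch of how the timestamp class above is driven; the input file names are hypothetical, and the imports rely on the package index re-exports shown earlier in this diff:

// Sketch only: verify an RFC 3161 timestamp over an artifact.
// 'timestamp.der', 'artifact.bin' and 'tsa.pem' are hypothetical inputs.
const fs = require('fs')
const { RFC3161Timestamp, crypto } = require('@sigstore/core')

const ts = RFC3161Timestamp.parse(fs.readFileSync('timestamp.der'))
const key = crypto.createPublicKey(fs.readFileSync('tsa.pem', 'utf8'))

// verify() throws RFC3161TimestampVerificationError on any mismatch;
// on success the signing time can be read off the embedded TSTInfo.
ts.verify(fs.readFileSync('artifact.bin'), key)
console.log('timestamped at', ts.signingTime)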
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js b/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js new file mode 100644 index 0000000000000..dc8e4fb339383 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js @@ -0,0 +1,61 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TSTInfo = void 0; +const crypto = __importStar(require("../crypto")); +const oid_1 = require("../oid"); +const error_1 = require("./error"); +class TSTInfo { + constructor(asn1) { + this.root = asn1; + } + get version() { + return this.root.subs[0].toInteger(); + } + get genTime() { + return this.root.subs[4].toDate(); + } + get messageImprintHashAlgorithm() { + const oid = this.messageImprintObj.subs[0].subs[0].toOID(); + return oid_1.SHA2_HASH_ALGOS[oid]; + } + get messageImprintHashedMessage() { + return this.messageImprintObj.subs[1].value; + } + get raw() { + return this.root.toDER(); + } + verify(data) { + const digest = crypto.digest(this.messageImprintHashAlgorithm, data); + if (!crypto.bufferEqual(digest, this.messageImprintHashedMessage)) { + throw new error_1.RFC3161TimestampVerificationError('message imprint does not match artifact'); + } + } + // https://www.rfc-editor.org/rfc/rfc3161#section-2.4.2 + get messageImprintObj() { + return this.root.subs[2]; + } +} +exports.TSTInfo = TSTInfo; diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/stream.js b/node_modules/pacote/node_modules/@sigstore/core/dist/stream.js new file mode 100644 index 0000000000000..0a24f8582eb23 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/core/dist/stream.js @@ -0,0 +1,115 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ByteStream = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +class StreamError extends Error { +} +class ByteStream { + constructor(buffer) { + this.start = 0; + if (buffer) { + this.buf = buffer; + this.view = Buffer.from(buffer); + } + else { + this.buf = new ArrayBuffer(0); + this.view = Buffer.from(this.buf); + } + } + get buffer() { + return this.view.subarray(0, this.start); + } + get length() { + return this.view.byteLength; + } + get position() { + return this.start; + } + seek(position) { + this.start = position; + } + // Returns a Buffer containing the specified number of bytes starting at the + // given start position. + slice(start, len) { + const end = start + len; + if (end > this.length) { + throw new StreamError('request past end of buffer'); + } + return this.view.subarray(start, end); + } + appendChar(char) { + this.ensureCapacity(1); + this.view[this.start] = char; + this.start += 1; + } + appendUint16(num) { + this.ensureCapacity(2); + const value = new Uint16Array([num]); + const view = new Uint8Array(value.buffer); + this.view[this.start] = view[1]; + this.view[this.start + 1] = view[0]; + this.start += 2; + } + appendUint24(num) { + this.ensureCapacity(3); + const value = new Uint32Array([num]); + const view = new Uint8Array(value.buffer); + this.view[this.start] = view[2]; + this.view[this.start + 1] = view[1]; + this.view[this.start + 2] = view[0]; + this.start += 3; + } + appendView(view) { + this.ensureCapacity(view.length); + this.view.set(view, this.start); + this.start += view.length; + } + getBlock(size) { + if (size <= 0) { + return Buffer.alloc(0); + } + if (this.start + size > this.view.length) { + throw new Error('request past end of buffer'); + } + const result = this.view.subarray(this.start, this.start + size); + this.start += size; + return result; + } + getUint8() { + return this.getBlock(1)[0]; + } + getUint16() { + const block = this.getBlock(2); + return (block[0] << 8) | block[1]; + } + ensureCapacity(size) { + if (this.start + size > this.view.byteLength) { + const blockSize = ByteStream.BLOCK_SIZE + (size > ByteStream.BLOCK_SIZE ? size : 0); + this.realloc(this.view.byteLength + blockSize); + } + } + realloc(size) { + const newArray = new ArrayBuffer(size); + const newView = Buffer.from(newArray); + // Copy the old buffer into the new one + newView.set(this.view); + this.buf = newArray; + this.view = newView; + } +} +exports.ByteStream = ByteStream; +ByteStream.BLOCK_SIZE = 1024; diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/x509/cert.js b/node_modules/pacote/node_modules/@sigstore/core/dist/x509/cert.js new file mode 100644 index 0000000000000..72ea8e0738bc8 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/core/dist/x509/cert.js @@ -0,0 +1,230 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.X509Certificate = exports.EXTENSION_OID_SCT = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const asn1_1 = require("../asn1"); +const crypto = __importStar(require("../crypto")); +const oid_1 = require("../oid"); +const pem = __importStar(require("../pem")); +const ext_1 = require("./ext"); +const EXTENSION_OID_SUBJECT_KEY_ID = '2.5.29.14'; +const EXTENSION_OID_KEY_USAGE = '2.5.29.15'; +const EXTENSION_OID_SUBJECT_ALT_NAME = '2.5.29.17'; +const EXTENSION_OID_BASIC_CONSTRAINTS = '2.5.29.19'; +const EXTENSION_OID_AUTHORITY_KEY_ID = '2.5.29.35'; +exports.EXTENSION_OID_SCT = '1.3.6.1.4.1.11129.2.4.2'; +class X509Certificate { + constructor(asn1) { + this.root = asn1; + } + static parse(cert) { + const der = typeof cert === 'string' ? pem.toDER(cert) : cert; + const asn1 = asn1_1.ASN1Obj.parseBuffer(der); + return new X509Certificate(asn1); + } + get tbsCertificate() { + return this.tbsCertificateObj; + } + get version() { + // version number is the first element of the version context specific tag + const ver = this.versionObj.subs[0].toInteger(); + return `v${(ver + BigInt(1)).toString()}`; + } + get serialNumber() { + return this.serialNumberObj.value; + } + get notBefore() { + // notBefore is the first element of the validity sequence + return this.validityObj.subs[0].toDate(); + } + get notAfter() { + // notAfter is the second element of the validity sequence + return this.validityObj.subs[1].toDate(); + } + get issuer() { + return this.issuerObj.value; + } + get subject() { + return this.subjectObj.value; + } + get publicKey() { + return this.subjectPublicKeyInfoObj.toDER(); + } + get signatureAlgorithm() { + const oid = this.signatureAlgorithmObj.subs[0].toOID(); + return oid_1.ECDSA_SIGNATURE_ALGOS[oid]; + } + get signatureValue() { + // Signature value is a bit string, so we need to skip the first byte + return this.signatureValueObj.value.subarray(1); + } + get subjectAltName() { + const ext = this.extSubjectAltName; + return ext?.uri || /* istanbul ignore next */ ext?.rfc822Name; + } + get extensions() { + // The extension list is the first (and only) element of the extensions + // context specific tag + /* istanbul ignore next */ + const extSeq = this.extensionsObj?.subs[0]; + /* istanbul ignore next */ + return extSeq?.subs || []; + } + get extKeyUsage() { + const ext = this.findExtension(EXTENSION_OID_KEY_USAGE); + return ext ? 
new ext_1.X509KeyUsageExtension(ext) : undefined; + } + get extBasicConstraints() { + const ext = this.findExtension(EXTENSION_OID_BASIC_CONSTRAINTS); + return ext ? new ext_1.X509BasicConstraintsExtension(ext) : undefined; + } + get extSubjectAltName() { + const ext = this.findExtension(EXTENSION_OID_SUBJECT_ALT_NAME); + return ext ? new ext_1.X509SubjectAlternativeNameExtension(ext) : undefined; + } + get extAuthorityKeyID() { + const ext = this.findExtension(EXTENSION_OID_AUTHORITY_KEY_ID); + return ext ? new ext_1.X509AuthorityKeyIDExtension(ext) : undefined; + } + get extSubjectKeyID() { + const ext = this.findExtension(EXTENSION_OID_SUBJECT_KEY_ID); + return ext + ? new ext_1.X509SubjectKeyIDExtension(ext) + : /* istanbul ignore next */ undefined; + } + get extSCT() { + const ext = this.findExtension(exports.EXTENSION_OID_SCT); + return ext ? new ext_1.X509SCTExtension(ext) : undefined; + } + get isCA() { + const ca = this.extBasicConstraints?.isCA || false; + // If the KeyUsage extension is present, keyCertSign must be set + if (this.extKeyUsage) { + return ca && this.extKeyUsage.keyCertSign; + } + // TODO: test coverage for this case + /* istanbul ignore next */ + return ca; + } + extension(oid) { + const ext = this.findExtension(oid); + return ext ? new ext_1.X509Extension(ext) : undefined; + } + verify(issuerCertificate) { + // Use the issuer's public key if provided, otherwise use the subject's + const publicKey = issuerCertificate?.publicKey || this.publicKey; + const key = crypto.createPublicKey(publicKey); + return crypto.verify(this.tbsCertificate.toDER(), key, this.signatureValue, this.signatureAlgorithm); + } + validForDate(date) { + return this.notBefore <= date && date <= this.notAfter; + } + equals(other) { + return this.root.toDER().equals(other.root.toDER()); + } + // Creates a copy of the certificate with a new buffer + clone() { + const der = this.root.toDER(); + const clone = Buffer.alloc(der.length); + der.copy(clone); + return X509Certificate.parse(clone); + } + findExtension(oid) { + // Find the extension with the given OID. 
The OID will always be the first + // element of the extension sequence + return this.extensions.find((ext) => ext.subs[0].toOID() === oid); + } + ///////////////////////////////////////////////////////////////////////////// + // The following properties use the documented x509 structure to locate the + // desired ASN.1 object + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1 + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.1.1 + get tbsCertificateObj() { + // tbsCertificate is the first element of the certificate sequence + return this.root.subs[0]; + } + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.1.2 + get signatureAlgorithmObj() { + // signatureAlgorithm is the second element of the certificate sequence + return this.root.subs[1]; + } + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.1.3 + get signatureValueObj() { + // signatureValue is the third element of the certificate sequence + return this.root.subs[2]; + } + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.1 + get versionObj() { + // version is the first element of the tbsCertificate sequence + return this.tbsCertificateObj.subs[0]; + } + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.2 + get serialNumberObj() { + // serialNumber is the second element of the tbsCertificate sequence + return this.tbsCertificateObj.subs[1]; + } + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.4 + get issuerObj() { + // issuer is the fourth element of the tbsCertificate sequence + return this.tbsCertificateObj.subs[3]; + } + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.5 + get validityObj() { + // version is the fifth element of the tbsCertificate sequence + return this.tbsCertificateObj.subs[4]; + } + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.6 + get subjectObj() { + // subject is the sixth element of the tbsCertificate sequence + return this.tbsCertificateObj.subs[5]; + } + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.7 + get subjectPublicKeyInfoObj() { + // subjectPublicKeyInfo is the seventh element of the tbsCertificate sequence + return this.tbsCertificateObj.subs[6]; + } + // Extensions can't be located by index because their position varies. Instead, + // we need to find the extensions context specific tag + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.9 + get extensionsObj() { + return this.tbsCertificateObj.subs.find((sub) => sub.tag.isContextSpecific(0x03)); + } +} +exports.X509Certificate = X509Certificate; diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/x509/ext.js b/node_modules/pacote/node_modules/@sigstore/core/dist/x509/ext.js new file mode 100644 index 0000000000000..1d481261b0aa6 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/core/dist/x509/ext.js @@ -0,0 +1,145 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.X509SCTExtension = exports.X509SubjectKeyIDExtension = exports.X509AuthorityKeyIDExtension = exports.X509SubjectAlternativeNameExtension = exports.X509KeyUsageExtension = exports.X509BasicConstraintsExtension = exports.X509Extension = void 0; +const stream_1 = require("../stream"); +const sct_1 = require("./sct"); +// https://www.rfc-editor.org/rfc/rfc5280#section-4.1 +class X509Extension { + constructor(asn1) { + this.root = asn1; + } + get oid() { + return this.root.subs[0].toOID(); + } + get critical() { + // The critical field is optional and will be the second element of the + // extension sequence if present. Default to false if not present. 
+ return this.root.subs.length === 3 ? this.root.subs[1].toBoolean() : false; + } + get value() { + return this.extnValueObj.value; + } + get valueObj() { + return this.extnValueObj; + } + get extnValueObj() { + // The extnValue field will be the last element of the extension sequence + return this.root.subs[this.root.subs.length - 1]; + } +} +exports.X509Extension = X509Extension; +// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.9 +class X509BasicConstraintsExtension extends X509Extension { + get isCA() { + return this.sequence.subs[0]?.toBoolean() ?? false; + } + get pathLenConstraint() { + return this.sequence.subs.length > 1 + ? this.sequence.subs[1].toInteger() + : undefined; + } + // The extnValue field contains a single sequence wrapping the isCA and + // pathLenConstraint. + get sequence() { + return this.extnValueObj.subs[0]; + } +} +exports.X509BasicConstraintsExtension = X509BasicConstraintsExtension; +// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.3 +class X509KeyUsageExtension extends X509Extension { + get digitalSignature() { + return this.bitString[0] === 1; + } + get keyCertSign() { + return this.bitString[5] === 1; + } + get crlSign() { + return this.bitString[6] === 1; + } + // The extnValue field contains a single bit string which is a bit mask + // indicating which key usages are enabled. + get bitString() { + return this.extnValueObj.subs[0].toBitString(); + } +} +exports.X509KeyUsageExtension = X509KeyUsageExtension; +// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.6 +class X509SubjectAlternativeNameExtension extends X509Extension { + get rfc822Name() { + return this.findGeneralName(0x01)?.value.toString('ascii'); + } + get uri() { + return this.findGeneralName(0x06)?.value.toString('ascii'); + } + // Retrieve the value of an otherName with the given OID. + otherName(oid) { + const otherName = this.findGeneralName(0x00); + if (otherName === undefined) { + return undefined; + } + // The otherName is a sequence containing an OID and a value. + // Need to check that the OID matches the one we're looking for. + const otherNameOID = otherName.subs[0].toOID(); + if (otherNameOID !== oid) { + return undefined; + } + // The otherNameValue is a sequence containing the actual value. + const otherNameValue = otherName.subs[1]; + return otherNameValue.subs[0].value.toString('ascii'); + } + findGeneralName(tag) { + return this.generalNames.find((gn) => gn.tag.isContextSpecific(tag)); + } + // The extnValue field contains a sequence of GeneralNames. 
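A rough sketch of how the certificate and extension accessors defined in this hunk fit together; the PEM file name is hypothetical, and X509Certificate comes from the package index re-export shown earlier:

// Sketch only: parse a certificate and read a few of the extensions above.
const fs = require('fs')
const { X509Certificate } = require('@sigstore/core')

const cert = X509Certificate.parse(fs.readFileSync('leaf.pem', 'utf8')) // accepts PEM or DER
console.log(cert.subjectAltName)                  // URI / email from the SAN extension
console.log(cert.extKeyUsage?.digitalSignature)   // bit 0 of the KeyUsage bit string
console.log(cert.validForDate(new Date()))        // notBefore <= now <= notAfter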
+ get generalNames() { + return this.extnValueObj.subs[0].subs; + } +} +exports.X509SubjectAlternativeNameExtension = X509SubjectAlternativeNameExtension; +// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.1 +class X509AuthorityKeyIDExtension extends X509Extension { + get keyIdentifier() { + return this.findSequenceMember(0x00)?.value; + } + findSequenceMember(tag) { + return this.sequence.subs.find((el) => el.tag.isContextSpecific(tag)); + } + // The extnValue field contains a single sequence wrapping the keyIdentifier + get sequence() { + return this.extnValueObj.subs[0]; + } +} +exports.X509AuthorityKeyIDExtension = X509AuthorityKeyIDExtension; +// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.2 +class X509SubjectKeyIDExtension extends X509Extension { + get keyIdentifier() { + return this.extnValueObj.subs[0].value; + } +} +exports.X509SubjectKeyIDExtension = X509SubjectKeyIDExtension; +// https://www.rfc-editor.org/rfc/rfc6962#section-3.3 +class X509SCTExtension extends X509Extension { + constructor(asn1) { + super(asn1); + } + get signedCertificateTimestamps() { + const buf = this.extnValueObj.subs[0].value; + const stream = new stream_1.ByteStream(buf); + // The overall list length is encoded in the first two bytes -- note this + // is the length of the list in bytes, NOT the number of SCTs in the list + const end = stream.getUint16() + 2; + const sctList = []; + while (stream.position < end) { + // Read the length of the next SCT + const sctLength = stream.getUint16(); + // Slice out the bytes for the next SCT and parse it + const sct = stream.getBlock(sctLength); + sctList.push(sct_1.SignedCertificateTimestamp.parse(sct)); + } + if (stream.position !== end) { + throw new Error('SCT list length does not match actual length'); + } + return sctList; + } +} +exports.X509SCTExtension = X509SCTExtension; diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/x509/index.js b/node_modules/pacote/node_modules/@sigstore/core/dist/x509/index.js new file mode 100644 index 0000000000000..cdd77e58f37d5 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/core/dist/x509/index.js @@ -0,0 +1,23 @@ +"use strict"; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.X509SCTExtension = exports.X509Certificate = exports.EXTENSION_OID_SCT = void 0; +var cert_1 = require("./cert"); +Object.defineProperty(exports, "EXTENSION_OID_SCT", { enumerable: true, get: function () { return cert_1.EXTENSION_OID_SCT; } }); +Object.defineProperty(exports, "X509Certificate", { enumerable: true, get: function () { return cert_1.X509Certificate; } }); +var ext_1 = require("./ext"); +Object.defineProperty(exports, "X509SCTExtension", { enumerable: true, get: function () { return ext_1.X509SCTExtension; } }); diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/x509/sct.js b/node_modules/pacote/node_modules/@sigstore/core/dist/x509/sct.js new file mode 100644 index 0000000000000..1603059c0d1ac --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/core/dist/x509/sct.js @@ -0,0 +1,141 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SignedCertificateTimestamp = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const crypto = __importStar(require("../crypto")); +const stream_1 = require("../stream"); +class SignedCertificateTimestamp { + constructor(options) { + this.version = options.version; + this.logID = options.logID; + this.timestamp = options.timestamp; + this.extensions = options.extensions; + this.hashAlgorithm = options.hashAlgorithm; + this.signatureAlgorithm = options.signatureAlgorithm; + this.signature = options.signature; + } + get datetime() { + return new Date(Number(this.timestamp.readBigInt64BE())); + } + // Returns the hash algorithm used to generate the SCT's signature. 
+ // https://www.rfc-editor.org/rfc/rfc5246#section-7.4.1.4.1 + get algorithm() { + switch (this.hashAlgorithm) { + /* istanbul ignore next */ + case 0: + return 'none'; + /* istanbul ignore next */ + case 1: + return 'md5'; + /* istanbul ignore next */ + case 2: + return 'sha1'; + /* istanbul ignore next */ + case 3: + return 'sha224'; + case 4: + return 'sha256'; + /* istanbul ignore next */ + case 5: + return 'sha384'; + /* istanbul ignore next */ + case 6: + return 'sha512'; + /* istanbul ignore next */ + default: + return 'unknown'; + } + } + verify(preCert, key) { + // Assemble the digitally-signed struct (the data over which the signature + // was generated). + // https://www.rfc-editor.org/rfc/rfc6962#section-3.2 + const stream = new stream_1.ByteStream(); + stream.appendChar(this.version); + stream.appendChar(0x00); // SignatureType = certificate_timestamp(0) + stream.appendView(this.timestamp); + stream.appendUint16(0x01); // LogEntryType = precert_entry(1) + stream.appendView(preCert); + stream.appendUint16(this.extensions.byteLength); + /* istanbul ignore next - extensions are very uncommon */ + if (this.extensions.byteLength > 0) { + stream.appendView(this.extensions); + } + return crypto.verify(stream.buffer, key, this.signature, this.algorithm); + } + // Parses a SignedCertificateTimestamp from a buffer. SCTs are encoded using + // TLS encoding which means the fields and lengths of most fields are + // specified as part of the SCT and TLS specs. + // https://www.rfc-editor.org/rfc/rfc6962#section-3.2 + // https://www.rfc-editor.org/rfc/rfc5246#section-7.4.1.4.1 + static parse(buf) { + const stream = new stream_1.ByteStream(buf); + // Version - enum { v1(0), (255) } + const version = stream.getUint8(); + // Log ID - struct { opaque key_id[32]; } + const logID = stream.getBlock(32); + // Timestamp - uint64 + const timestamp = stream.getBlock(8); + // Extensions - opaque extensions<0..2^16-1>; + const extenstionLength = stream.getUint16(); + const extensions = stream.getBlock(extenstionLength); + // Hash algo - enum { sha256(4), . . . 
(255) } + const hashAlgorithm = stream.getUint8(); + // Signature algo - enum { anonymous(0), rsa(1), dsa(2), ecdsa(3), (255) } + const signatureAlgorithm = stream.getUint8(); + // Signature - opaque signature<0..2^16-1>; + const sigLength = stream.getUint16(); + const signature = stream.getBlock(sigLength); + // Check that we read the entire buffer + if (stream.position !== buf.length) { + throw new Error('SCT buffer length mismatch'); + } + return new SignedCertificateTimestamp({ + version, + logID, + timestamp, + extensions, + hashAlgorithm, + signatureAlgorithm, + signature, + }); + } +} +exports.SignedCertificateTimestamp = SignedCertificateTimestamp; diff --git a/node_modules/pacote/node_modules/@sigstore/core/package.json b/node_modules/pacote/node_modules/@sigstore/core/package.json new file mode 100644 index 0000000000000..af5dd281ac90e --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/core/package.json @@ -0,0 +1,31 @@ +{ + "name": "@sigstore/core", + "version": "2.0.0", + "description": "Base library for Sigstore", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "clean": "shx rm -rf dist *.tsbuildinfo", + "build": "tsc --build", + "test": "jest" + }, + "files": [ + "dist" + ], + "author": "bdehamer@github.com", + "license": "Apache-2.0", + "repository": { + "type": "git", + "url": "git+https://github.com/sigstore/sigstore-js.git" + }, + "bugs": { + "url": "https://github.com/sigstore/sigstore-js/issues" + }, + "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/core#readme", + "publishConfig": { + "provenance": true + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } +} diff --git a/node_modules/pacote/node_modules/@sigstore/sign/LICENSE b/node_modules/pacote/node_modules/@sigstore/sign/LICENSE new file mode 100644 index 0000000000000..e9e7c1679a09d --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 The Sigstore Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/base.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/base.js new file mode 100644 index 0000000000000..61d5eba4568a3 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/base.js @@ -0,0 +1,50 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.BaseBundleBuilder = void 0; +// BaseBundleBuilder is a base class for BundleBuilder implementations. It +// provides a the basic wokflow for signing and witnessing an artifact. +// Subclasses must implement the `package` method to assemble a valid bundle +// with the generated signature and verification material. +class BaseBundleBuilder { + constructor(options) { + this.signer = options.signer; + this.witnesses = options.witnesses; + } + // Executes the signing/witnessing process for the given artifact. 
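+  // The flow below: prepare the artifact, sign it, let the subclass assemble
+  // the bundle via `package`, then invoke each witness and fold the returned
+  // verification material (tlog entries and RFC 3161 timestamps) back into
+  // the bundle before returning it.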
+ async create(artifact) { + const signature = await this.prepare(artifact).then((blob) => this.signer.sign(blob)); + const bundle = await this.package(artifact, signature); + // Invoke all of the witnesses in parallel + const verificationMaterials = await Promise.all(this.witnesses.map((witness) => witness.testify(bundle.content, publicKey(signature.key)))); + // Collect the verification material from all of the witnesses + const tlogEntryList = []; + const timestampList = []; + verificationMaterials.forEach(({ tlogEntries, rfc3161Timestamps }) => { + tlogEntryList.push(...(tlogEntries ?? [])); + timestampList.push(...(rfc3161Timestamps ?? [])); + }); + // Merge the collected verification material into the bundle + bundle.verificationMaterial.tlogEntries = tlogEntryList; + bundle.verificationMaterial.timestampVerificationData = { + rfc3161Timestamps: timestampList, + }; + return bundle; + } + // Override this function to apply any pre-signing transformations to the + // artifact. The returned buffer will be signed by the signer. The default + // implementation simply returns the artifact data. + async prepare(artifact) { + return artifact.data; + } +} +exports.BaseBundleBuilder = BaseBundleBuilder; +// Extracts the public key from a KeyMaterial. Returns either the public key +// or the certificate, depending on the type of key material. +function publicKey(key) { + switch (key.$case) { + case 'publicKey': + return key.publicKey; + case 'x509Certificate': + return key.certificate; + } +} diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/bundle.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/bundle.js new file mode 100644 index 0000000000000..ed32286ad88ef --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/bundle.js @@ -0,0 +1,71 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toMessageSignatureBundle = toMessageSignatureBundle; +exports.toDSSEBundle = toDSSEBundle; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +*/ +const sigstore = __importStar(require("@sigstore/bundle")); +const util_1 = require("../util"); +// Helper functions for assembling the parts of a Sigstore bundle +// Message signature bundle - $case: 'messageSignature' +function toMessageSignatureBundle(artifact, signature) { + const digest = util_1.crypto.digest('sha256', artifact.data); + return sigstore.toMessageSignatureBundle({ + digest, + signature: signature.signature, + certificate: signature.key.$case === 'x509Certificate' + ? util_1.pem.toDER(signature.key.certificate) + : undefined, + keyHint: signature.key.$case === 'publicKey' ? signature.key.hint : undefined, + certificateChain: true, + }); +} +// DSSE envelope bundle - $case: 'dsseEnvelope' +function toDSSEBundle(artifact, signature, certificateChain) { + return sigstore.toDSSEBundle({ + artifact: artifact.data, + artifactType: artifact.type, + signature: signature.signature, + certificate: signature.key.$case === 'x509Certificate' + ? util_1.pem.toDER(signature.key.certificate) + : undefined, + keyHint: signature.key.$case === 'publicKey' ? signature.key.hint : undefined, + certificateChain, + }); +} diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/dsse.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/dsse.js new file mode 100644 index 0000000000000..86046ba8f3013 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/dsse.js @@ -0,0 +1,46 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DSSEBundleBuilder = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const util_1 = require("../util"); +const base_1 = require("./base"); +const bundle_1 = require("./bundle"); +// BundleBuilder implementation for DSSE wrapped attestations +class DSSEBundleBuilder extends base_1.BaseBundleBuilder { + constructor(options) { + super(options); + this.certificateChain = options.certificateChain ?? false; + } + // DSSE requires the artifact to be pre-encoded with the payload type + // before the signature is generated. + async prepare(artifact) { + const a = artifactDefaults(artifact); + return util_1.dsse.preAuthEncoding(a.type, a.data); + } + // Packages the artifact and signature into a DSSE bundle + async package(artifact, signature) { + return (0, bundle_1.toDSSEBundle)(artifactDefaults(artifact), signature, this.certificateChain); + } +} +exports.DSSEBundleBuilder = DSSEBundleBuilder; +// Defaults the artifact type to an empty string if not provided +function artifactDefaults(artifact) { + return { + ...artifact, + type: artifact.type ?? 
'', + }; +} diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/index.js new file mode 100644 index 0000000000000..d67c8c324a4f0 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/index.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.MessageSignatureBundleBuilder = exports.DSSEBundleBuilder = void 0; +var dsse_1 = require("./dsse"); +Object.defineProperty(exports, "DSSEBundleBuilder", { enumerable: true, get: function () { return dsse_1.DSSEBundleBuilder; } }); +var message_1 = require("./message"); +Object.defineProperty(exports, "MessageSignatureBundleBuilder", { enumerable: true, get: function () { return message_1.MessageSignatureBundleBuilder; } }); diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/message.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/message.js new file mode 100644 index 0000000000000..e3991f42bab93 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/message.js @@ -0,0 +1,30 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.MessageSignatureBundleBuilder = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const base_1 = require("./base"); +const bundle_1 = require("./bundle"); +// BundleBuilder implementation for raw message signatures +class MessageSignatureBundleBuilder extends base_1.BaseBundleBuilder { + constructor(options) { + super(options); + } + async package(artifact, signature) { + return (0, bundle_1.toMessageSignatureBundle)(artifact, signature); + } +} +exports.MessageSignatureBundleBuilder = MessageSignatureBundleBuilder; diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/error.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/error.js new file mode 100644 index 0000000000000..d28f1913cc77e --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/error.js @@ -0,0 +1,39 @@ +"use strict"; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.InternalError = void 0; +exports.internalError = internalError; +const error_1 = require("./external/error"); +class InternalError extends Error { + constructor({ code, message, cause, }) { + super(message); + this.name = this.constructor.name; + this.cause = cause; + this.code = code; + } +} +exports.InternalError = InternalError; +function internalError(err, code, message) { + if (err instanceof error_1.HTTPError) { + message += ` - ${err.message}`; + } + throw new InternalError({ + code: code, + message: message, + cause: err, + }); +} diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/external/error.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/external/error.js new file mode 100644 index 0000000000000..a6a65adebb176 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/external/error.js @@ -0,0 +1,26 @@ +"use strict"; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.HTTPError = void 0; +class HTTPError extends Error { + constructor({ status, message, location, }) { + super(`(${status}) ${message}`); + this.statusCode = status; + this.location = location; + } +} +exports.HTTPError = HTTPError; diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/external/fetch.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/external/fetch.js new file mode 100644 index 0000000000000..116090f3c641e --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/external/fetch.js @@ -0,0 +1,98 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fetchWithRetry = fetchWithRetry; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const http2_1 = require("http2"); +const make_fetch_happen_1 = __importDefault(require("make-fetch-happen")); +const proc_log_1 = require("proc-log"); +const promise_retry_1 = __importDefault(require("promise-retry")); +const util_1 = require("../util"); +const error_1 = require("./error"); +const { HTTP2_HEADER_LOCATION, HTTP2_HEADER_CONTENT_TYPE, HTTP2_HEADER_USER_AGENT, HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_TOO_MANY_REQUESTS, HTTP_STATUS_REQUEST_TIMEOUT, } = http2_1.constants; +async function fetchWithRetry(url, options) { + return (0, promise_retry_1.default)(async (retry, attemptNum) => { + const method = options.method || 'POST'; + const headers = { + [HTTP2_HEADER_USER_AGENT]: util_1.ua.getUserAgent(), + ...options.headers, + }; + const response = await (0, make_fetch_happen_1.default)(url, { + method, + headers, + body: options.body, + timeout: options.timeout, + retry: false, // We're handling retries ourselves + }).catch((reason) => { + proc_log_1.log.http('fetch', `${method} ${url} attempt ${attemptNum} failed with ${reason}`); + return retry(reason); + }); + if (response.ok) { + return response; + } + else { + const error = await errorFromResponse(response); + proc_log_1.log.http('fetch', `${method} ${url} attempt ${attemptNum} failed with ${response.status}`); + if (retryable(response.status)) { + return retry(error); + } + else { + throw error; + } + } + }, retryOpts(options.retry)); +} +// Translate a Response into an HTTPError instance. This will attempt to parse +// the response body for a message, but will default to the statusText if none +// is found. +const errorFromResponse = async (response) => { + let message = response.statusText; + const location = response.headers.get(HTTP2_HEADER_LOCATION) || undefined; + const contentType = response.headers.get(HTTP2_HEADER_CONTENT_TYPE); + // If response type is JSON, try to parse the body for a message + if (contentType?.includes('application/json')) { + try { + const body = await response.json(); + message = body.message || message; + } + catch (e) { + // ignore + } + } + return new error_1.HTTPError({ + status: response.status, + message: message, + location: location, + }); +}; +// Determine if a status code is retryable. This includes 5xx errors, 408, and +// 429. +const retryable = (status) => [HTTP_STATUS_REQUEST_TIMEOUT, HTTP_STATUS_TOO_MANY_REQUESTS].includes(status) || status >= HTTP_STATUS_INTERNAL_SERVER_ERROR; +// Normalize the retry options to the format expected by promise-retry +const retryOpts = (retry) => { + if (typeof retry === 'boolean') { + return { retries: retry ? 1 : 0 }; + } + else if (typeof retry === 'number') { + return { retries: retry }; + } + else { + return { retries: 0, ...retry }; + } +}; diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/external/fulcio.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/external/fulcio.js new file mode 100644 index 0000000000000..de6a1ad9f9e79 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/external/fulcio.js @@ -0,0 +1,41 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Fulcio = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const fetch_1 = require("./fetch"); +/** + * Fulcio API client. + */ +class Fulcio { + constructor(options) { + this.options = options; + } + async createSigningCertificate(request) { + const { baseURL, retry, timeout } = this.options; + const url = `${baseURL}/api/v2/signingCert`; + const response = await (0, fetch_1.fetchWithRetry)(url, { + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify(request), + timeout, + retry, + }); + return response.json(); + } +} +exports.Fulcio = Fulcio; diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/external/rekor.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/external/rekor.js new file mode 100644 index 0000000000000..bb59a126e032f --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/external/rekor.js @@ -0,0 +1,80 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Rekor = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const fetch_1 = require("./fetch"); +/** + * Rekor API client. + */ +class Rekor { + constructor(options) { + this.options = options; + } + /** + * Create a new entry in the Rekor log. + * @param propsedEntry {ProposedEntry} Data to create a new entry + * @returns {Promise} The created entry + */ + async createEntry(propsedEntry) { + const { baseURL, timeout, retry } = this.options; + const url = `${baseURL}/api/v1/log/entries`; + const response = await (0, fetch_1.fetchWithRetry)(url, { + headers: { + 'Content-Type': 'application/json', + Accept: 'application/json', + }, + body: JSON.stringify(propsedEntry), + timeout, + retry, + }); + const data = await response.json(); + return entryFromResponse(data); + } + /** + * Get an entry from the Rekor log. + * @param uuid {string} The UUID of the entry to retrieve + * @returns {Promise} The retrieved entry + */ + async getEntry(uuid) { + const { baseURL, timeout, retry } = this.options; + const url = `${baseURL}/api/v1/log/entries/${uuid}`; + const response = await (0, fetch_1.fetchWithRetry)(url, { + method: 'GET', + headers: { + Accept: 'application/json', + }, + timeout, + retry, + }); + const data = await response.json(); + return entryFromResponse(data); + } +} +exports.Rekor = Rekor; +// Unpack the response from the Rekor API into a more convenient format. 
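+// Rekor responds with an object keyed by the entry UUID; the UUID is lifted
+// onto the entry itself so callers receive a flat { uuid, ...entry } shape.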
+function entryFromResponse(data) { + const entries = Object.entries(data); + if (entries.length != 1) { + throw new Error('Received multiple entries in Rekor response'); + } + // Grab UUID and entry data from the response + const [uuid, entry] = entries[0]; + return { + ...entry, + uuid, + }; +} diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/external/tsa.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/external/tsa.js new file mode 100644 index 0000000000000..a948ba9cca2c7 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/external/tsa.js @@ -0,0 +1,38 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TimestampAuthority = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const fetch_1 = require("./fetch"); +class TimestampAuthority { + constructor(options) { + this.options = options; + } + async createTimestamp(request) { + const { baseURL, timeout, retry } = this.options; + const url = `${baseURL}/api/v1/timestamp`; + const response = await (0, fetch_1.fetchWithRetry)(url, { + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify(request), + timeout, + retry, + }); + return response.buffer(); + } +} +exports.TimestampAuthority = TimestampAuthority; diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/ci.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/ci.js new file mode 100644 index 0000000000000..d79133952b605 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/ci.js @@ -0,0 +1,73 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.CIContextProvider = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const make_fetch_happen_1 = __importDefault(require("make-fetch-happen")); +// Collection of all the CI-specific providers we have implemented +const providers = [getGHAToken, getEnv]; +/** + * CIContextProvider is a composite identity provider which will iterate + * over all of the CI-specific providers and return the token from the first + * one that resolves. 
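+ *
+ * Providers are tried concurrently (Promise.any); the supported sources are
+ * the GitHub Actions OIDC token endpoint and the SIGSTORE_ID_TOKEN
+ * environment variable.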
+ */ +class CIContextProvider { + /* istanbul ignore next */ + constructor(audience = 'sigstore') { + this.audience = audience; + } + // Invoke all registered ProviderFuncs and return the value of whichever one + // resolves first. + async getToken() { + return Promise.any(providers.map((getToken) => getToken(this.audience))).catch(() => Promise.reject('CI: no tokens available')); + } +} +exports.CIContextProvider = CIContextProvider; +/** + * getGHAToken can retrieve an OIDC token when running in a GitHub Actions + * workflow + */ +async function getGHAToken(audience) { + // Check to see if we're running in GitHub Actions + if (!process.env.ACTIONS_ID_TOKEN_REQUEST_URL || + !process.env.ACTIONS_ID_TOKEN_REQUEST_TOKEN) { + return Promise.reject('no token available'); + } + // Construct URL to request token w/ appropriate audience + const url = new URL(process.env.ACTIONS_ID_TOKEN_REQUEST_URL); + url.searchParams.append('audience', audience); + const response = await (0, make_fetch_happen_1.default)(url.href, { + retry: 2, + headers: { + Accept: 'application/json', + Authorization: `Bearer ${process.env.ACTIONS_ID_TOKEN_REQUEST_TOKEN}`, + }, + }); + return response.json().then((data) => data.value); +} +/** + * getEnv can retrieve an OIDC token from an environment variable. + * This matches the behavior of https://github.com/sigstore/cosign/tree/main/pkg/providers/envvar + */ +async function getEnv() { + if (!process.env.SIGSTORE_ID_TOKEN) { + return Promise.reject('no token available'); + } + return process.env.SIGSTORE_ID_TOKEN; +} diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/index.js new file mode 100644 index 0000000000000..1c1223b443fab --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/index.js @@ -0,0 +1,20 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.CIContextProvider = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +var ci_1 = require("./ci"); +Object.defineProperty(exports, "CIContextProvider", { enumerable: true, get: function () { return ci_1.CIContextProvider; } }); diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/provider.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/provider.js new file mode 100644 index 0000000000000..c8ad2e549bdc6 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/provider.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/index.js new file mode 100644 index 0000000000000..383b76083361b --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/index.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TSAWitness = exports.RekorWitness = exports.DEFAULT_REKOR_URL = exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = exports.CIContextProvider = exports.InternalError = exports.MessageSignatureBundleBuilder = exports.DSSEBundleBuilder = void 0; +var bundler_1 = require("./bundler"); +Object.defineProperty(exports, "DSSEBundleBuilder", { enumerable: true, get: function () { return bundler_1.DSSEBundleBuilder; } }); +Object.defineProperty(exports, "MessageSignatureBundleBuilder", { enumerable: true, get: function () { return bundler_1.MessageSignatureBundleBuilder; } }); +var error_1 = require("./error"); +Object.defineProperty(exports, "InternalError", { enumerable: true, get: function () { return error_1.InternalError; } }); +var identity_1 = require("./identity"); +Object.defineProperty(exports, "CIContextProvider", { enumerable: true, get: function () { return identity_1.CIContextProvider; } }); +var signer_1 = require("./signer"); +Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return signer_1.DEFAULT_FULCIO_URL; } }); +Object.defineProperty(exports, "FulcioSigner", { enumerable: true, get: function () { return signer_1.FulcioSigner; } }); +var witness_1 = require("./witness"); +Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return witness_1.DEFAULT_REKOR_URL; } }); +Object.defineProperty(exports, "RekorWitness", { enumerable: true, get: function () { return witness_1.RekorWitness; } }); +Object.defineProperty(exports, "TSAWitness", { enumerable: true, get: function () { return witness_1.TSAWitness; } }); diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js new file mode 100644 index 0000000000000..f01703cfab564 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js @@ -0,0 +1,59 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.CAClient = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +*/ +const error_1 = require("../../error"); +const fulcio_1 = require("../../external/fulcio"); +class CAClient { + constructor(options) { + this.fulcio = new fulcio_1.Fulcio({ + baseURL: options.fulcioBaseURL, + retry: options.retry, + timeout: options.timeout, + }); + } + async createSigningCertificate(identityToken, publicKey, challenge) { + const request = toCertificateRequest(identityToken, publicKey, challenge); + try { + const resp = await this.fulcio.createSigningCertificate(request); + // Account for the fact that the response may contain either a + // signedCertificateEmbeddedSct or a signedCertificateDetachedSct. + const cert = resp.signedCertificateEmbeddedSct + ? resp.signedCertificateEmbeddedSct + : resp.signedCertificateDetachedSct; + return cert.chain.certificates; + } + catch (err) { + (0, error_1.internalError)(err, 'CA_CREATE_SIGNING_CERTIFICATE_ERROR', 'error creating signing certificate'); + } + } +} +exports.CAClient = CAClient; +function toCertificateRequest(identityToken, publicKey, challenge) { + return { + credentials: { + oidcIdentityToken: identityToken, + }, + publicKeyRequest: { + publicKey: { + algorithm: 'ECDSA', + content: publicKey, + }, + proofOfPossession: challenge.toString('base64'), + }, + }; +} diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js new file mode 100644 index 0000000000000..481aa5c3579a2 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js @@ -0,0 +1,45 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.EphemeralSigner = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const crypto_1 = __importDefault(require("crypto")); +const EC_KEYPAIR_TYPE = 'ec'; +const P256_CURVE = 'P-256'; +// Signer implementation which uses an ephemeral keypair to sign artifacts. +// The private key lives only in memory and is tied to the lifetime of the +// EphemeralSigner instance. 
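+// Keys are generated on the P-256 curve; `sign` returns the raw signature
+// together with the SPKI/PEM-encoded public key so that it can later be
+// bound to a Fulcio-issued certificate.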
+class EphemeralSigner { + constructor() { + this.keypair = crypto_1.default.generateKeyPairSync(EC_KEYPAIR_TYPE, { + namedCurve: P256_CURVE, + }); + } + async sign(data) { + const signature = crypto_1.default.sign(null, data, this.keypair.privateKey); + const publicKey = this.keypair.publicKey + .export({ format: 'pem', type: 'spki' }) + .toString('ascii'); + return { + signature: signature, + key: { $case: 'publicKey', publicKey }, + }; + } +} +exports.EphemeralSigner = EphemeralSigner; diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/index.js new file mode 100644 index 0000000000000..89a432548d2b4 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/index.js @@ -0,0 +1,87 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const error_1 = require("../../error"); +const util_1 = require("../../util"); +const ca_1 = require("./ca"); +const ephemeral_1 = require("./ephemeral"); +exports.DEFAULT_FULCIO_URL = 'https://fulcio.sigstore.dev'; +// Signer implementation which can be used to decorate another signer +// with a Fulcio-issued signing certificate for the signer's public key. +// Must be instantiated with an identity provider which can provide a JWT +// which represents the identity to be bound to the signing certificate. +class FulcioSigner { + constructor(options) { + this.ca = new ca_1.CAClient({ + ...options, + fulcioBaseURL: options.fulcioBaseURL || /* istanbul ignore next */ exports.DEFAULT_FULCIO_URL, + }); + this.identityProvider = options.identityProvider; + this.keyHolder = options.keyHolder || new ephemeral_1.EphemeralSigner(); + } + async sign(data) { + // Retrieve identity token from the supplied identity provider + const identityToken = await this.getIdentityToken(); + // Extract challenge claim from OIDC token + let subject; + try { + subject = util_1.oidc.extractJWTSubject(identityToken); + } + catch (err) { + throw new error_1.InternalError({ + code: 'IDENTITY_TOKEN_PARSE_ERROR', + message: `invalid identity token: ${identityToken}`, + cause: err, + }); + } + // Construct challenge value by signing the subject claim + const challenge = await this.keyHolder.sign(Buffer.from(subject)); + if (challenge.key.$case !== 'publicKey') { + throw new error_1.InternalError({ + code: 'CA_CREATE_SIGNING_CERTIFICATE_ERROR', + message: 'unexpected format for signing key', + }); + } + // Create signing certificate + const certificates = await this.ca.createSigningCertificate(identityToken, challenge.key.publicKey, challenge.signature); + // Generate artifact signature + const signature = await this.keyHolder.sign(data); + // Specifically returning only the first certificate in the chain + // as the key. 
+ return { + signature: signature.signature, + key: { + $case: 'x509Certificate', + certificate: certificates[0], + }, + }; + } + async getIdentityToken() { + try { + return await this.identityProvider.getToken(); + } + catch (err) { + throw new error_1.InternalError({ + code: 'IDENTITY_TOKEN_READ_ERROR', + message: 'error retrieving identity token', + cause: err, + }); + } + } +} +exports.FulcioSigner = FulcioSigner; diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/index.js new file mode 100644 index 0000000000000..e2087767b81c1 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/index.js @@ -0,0 +1,22 @@ +"use strict"; +/* istanbul ignore file */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +var fulcio_1 = require("./fulcio"); +Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return fulcio_1.DEFAULT_FULCIO_URL; } }); +Object.defineProperty(exports, "FulcioSigner", { enumerable: true, get: function () { return fulcio_1.FulcioSigner; } }); diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/signer.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/signer.js new file mode 100644 index 0000000000000..b92c54183375d --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/signer.js @@ -0,0 +1,17 @@ +"use strict"; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/types/fetch.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/types/fetch.js new file mode 100644 index 0000000000000..c8ad2e549bdc6 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/types/fetch.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/util/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/util/index.js new file mode 100644 index 0000000000000..f467c9150c348 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/util/index.js @@ -0,0 +1,49 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ua = exports.oidc = exports.pem = exports.json = exports.encoding = exports.dsse = exports.crypto = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +var core_1 = require("@sigstore/core"); +Object.defineProperty(exports, "crypto", { enumerable: true, get: function () { return core_1.crypto; } }); +Object.defineProperty(exports, "dsse", { enumerable: true, get: function () { return core_1.dsse; } }); +Object.defineProperty(exports, "encoding", { enumerable: true, get: function () { return core_1.encoding; } }); +Object.defineProperty(exports, "json", { enumerable: true, get: function () { return core_1.json; } }); +Object.defineProperty(exports, "pem", { enumerable: true, get: function () { return core_1.pem; } }); +exports.oidc = __importStar(require("./oidc")); +exports.ua = __importStar(require("./ua")); diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/util/oidc.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/util/oidc.js new file mode 100644 index 0000000000000..37c5b168ee12e --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/util/oidc.js @@ -0,0 +1,30 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.extractJWTSubject = extractJWTSubject; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const core_1 = require("@sigstore/core"); +function extractJWTSubject(jwt) { + const parts = jwt.split('.', 3); + const payload = JSON.parse(core_1.encoding.base64Decode(parts[1])); + switch (payload.iss) { + case 'https://accounts.google.com': + case 'https://oauth2.sigstore.dev/auth': + return payload.email; + default: + return payload.sub; + } +} diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/util/ua.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/util/ua.js new file mode 100644 index 0000000000000..b15ff2070fb9f --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/util/ua.js @@ -0,0 +1,32 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getUserAgent = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const os_1 = __importDefault(require("os")); +// Format User-Agent: / () +// source: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/User-Agent +const getUserAgent = () => { + const packageVersion = require('../../package.json').version; + const nodeVersion = process.version; + const platformName = os_1.default.platform(); + const archName = os_1.default.arch(); + return `sigstore-js/${packageVersion} (Node ${nodeVersion}) (${platformName}/${archName})`; +}; +exports.getUserAgent = getUserAgent; diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/index.js new file mode 100644 index 0000000000000..72677c399caa7 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/index.js @@ -0,0 +1,24 @@ +"use strict"; +/* istanbul ignore file */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TSAWitness = exports.RekorWitness = exports.DEFAULT_REKOR_URL = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +var tlog_1 = require("./tlog"); +Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return tlog_1.DEFAULT_REKOR_URL; } }); +Object.defineProperty(exports, "RekorWitness", { enumerable: true, get: function () { return tlog_1.RekorWitness; } }); +var tsa_1 = require("./tsa"); +Object.defineProperty(exports, "TSAWitness", { enumerable: true, get: function () { return tsa_1.TSAWitness; } }); diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/client.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/client.js new file mode 100644 index 0000000000000..22c895f2ca7ed --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/client.js @@ -0,0 +1,61 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TLogClient = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const error_1 = require("../../error"); +const error_2 = require("../../external/error"); +const rekor_1 = require("../../external/rekor"); +class TLogClient { + constructor(options) { + this.fetchOnConflict = options.fetchOnConflict ?? false; + this.rekor = new rekor_1.Rekor({ + baseURL: options.rekorBaseURL, + retry: options.retry, + timeout: options.timeout, + }); + } + async createEntry(proposedEntry) { + let entry; + try { + entry = await this.rekor.createEntry(proposedEntry); + } + catch (err) { + // If the entry already exists, fetch it (if enabled) + if (entryExistsError(err) && this.fetchOnConflict) { + // Grab the UUID of the existing entry from the location header + /* istanbul ignore next */ + const uuid = err.location.split('/').pop() || ''; + try { + entry = await this.rekor.getEntry(uuid); + } + catch (err) { + (0, error_1.internalError)(err, 'TLOG_FETCH_ENTRY_ERROR', 'error fetching tlog entry'); + } + } + else { + (0, error_1.internalError)(err, 'TLOG_CREATE_ENTRY_ERROR', 'error creating tlog entry'); + } + } + return entry; + } +} +exports.TLogClient = TLogClient; +function entryExistsError(value) { + return (value instanceof error_2.HTTPError && + value.statusCode === 409 && + value.location !== undefined); +} diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/entry.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/entry.js new file mode 100644 index 0000000000000..bb1c68e914b90 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/entry.js @@ -0,0 +1,140 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toProposedEntry = toProposedEntry; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const bundle_1 = require("@sigstore/bundle"); +const util_1 = require("../../util"); +const SHA256_ALGORITHM = 'sha256'; +function toProposedEntry(content, publicKey, +// TODO: Remove this parameter once have completely switched to 'dsse' entries +entryType = 'dsse') { + switch (content.$case) { + case 'dsseEnvelope': + // TODO: Remove this conditional once have completely ditched "intoto" entries + if (entryType === 'intoto') { + return toProposedIntotoEntry(content.dsseEnvelope, publicKey); + } + return toProposedDSSEEntry(content.dsseEnvelope, publicKey); + case 'messageSignature': + return toProposedHashedRekordEntry(content.messageSignature, publicKey); + } +} +// Returns a properly formatted Rekor "hashedrekord" entry for the given digest +// and signature +function toProposedHashedRekordEntry(messageSignature, publicKey) { + const hexDigest = messageSignature.messageDigest.digest.toString('hex'); + const b64Signature = messageSignature.signature.toString('base64'); + const b64Key = util_1.encoding.base64Encode(publicKey); + return { + apiVersion: '0.0.1', + kind: 'hashedrekord', + spec: { + data: { + hash: { + algorithm: SHA256_ALGORITHM, + value: hexDigest, + }, + }, + signature: { + content: b64Signature, + publicKey: { + content: b64Key, + }, + }, + }, + }; +} +// Returns a properly formatted Rekor "dsse" entry for the given DSSE envelope +// and signature +function toProposedDSSEEntry(envelope, publicKey) { + const envelopeJSON = JSON.stringify((0, bundle_1.envelopeToJSON)(envelope)); + const encodedKey = util_1.encoding.base64Encode(publicKey); + return { + apiVersion: '0.0.1', + kind: 'dsse', + spec: { + proposedContent: { + envelope: envelopeJSON, + verifiers: [encodedKey], + }, + }, + }; +} +// Returns a properly formatted Rekor "intoto" entry for the given DSSE +// envelope and signature +function toProposedIntotoEntry(envelope, publicKey) { + // Calculate the value for the payloadHash field in the Rekor entry + const payloadHash = util_1.crypto + .digest(SHA256_ALGORITHM, envelope.payload) + .toString('hex'); + // Calculate the value for the hash field in the Rekor entry + const envelopeHash = calculateDSSEHash(envelope, publicKey); + // Collect values for re-creating the DSSE envelope. + // Double-encode payload and signature cause that's what Rekor expects + const payload = util_1.encoding.base64Encode(envelope.payload.toString('base64')); + const sig = util_1.encoding.base64Encode(envelope.signatures[0].sig.toString('base64')); + const keyid = envelope.signatures[0].keyid; + const encodedKey = util_1.encoding.base64Encode(publicKey); + // Create the envelope portion of the entry. Note the inclusion of the + // publicKey in the signature struct is not a standard part of a DSSE + // envelope, but is required by Rekor. + const dsse = { + payloadType: envelope.payloadType, + payload: payload, + signatures: [{ sig, publicKey: encodedKey }], + }; + // If the keyid is an empty string, Rekor seems to remove it altogether. We + // need to do the same here so that we can properly recreate the entry for + // verification. 
+ if (keyid.length > 0) { + dsse.signatures[0].keyid = keyid; + } + return { + apiVersion: '0.0.2', + kind: 'intoto', + spec: { + content: { + envelope: dsse, + hash: { algorithm: SHA256_ALGORITHM, value: envelopeHash }, + payloadHash: { algorithm: SHA256_ALGORITHM, value: payloadHash }, + }, + }, + }; +} +// Calculates the hash of a DSSE envelope for inclusion in a Rekor entry. +// There is no standard way to do this, so the scheme we're using as as +// follows: +// * payload is base64 encoded +// * signature is base64 encoded (only the first signature is used) +// * keyid is included ONLY if it is NOT an empty string +// * The resulting JSON is canonicalized and hashed to a hex string +function calculateDSSEHash(envelope, publicKey) { + const dsse = { + payloadType: envelope.payloadType, + payload: envelope.payload.toString('base64'), + signatures: [ + { sig: envelope.signatures[0].sig.toString('base64'), publicKey }, + ], + }; + // If the keyid is an empty string, Rekor seems to remove it altogether. + if (envelope.signatures[0].keyid.length > 0) { + dsse.signatures[0].keyid = envelope.signatures[0].keyid; + } + return util_1.crypto + .digest(SHA256_ALGORITHM, util_1.json.canonicalize(dsse)) + .toString('hex'); +} diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/index.js new file mode 100644 index 0000000000000..6197b09d4cdd9 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/index.js @@ -0,0 +1,82 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.RekorWitness = exports.DEFAULT_REKOR_URL = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const util_1 = require("../../util"); +const client_1 = require("./client"); +const entry_1 = require("./entry"); +exports.DEFAULT_REKOR_URL = 'https://rekor.sigstore.dev'; +class RekorWitness { + constructor(options) { + this.entryType = options.entryType; + this.tlog = new client_1.TLogClient({ + ...options, + rekorBaseURL: options.rekorBaseURL || /* istanbul ignore next */ exports.DEFAULT_REKOR_URL, + }); + } + async testify(content, publicKey) { + const proposedEntry = (0, entry_1.toProposedEntry)(content, publicKey, this.entryType); + const entry = await this.tlog.createEntry(proposedEntry); + return toTransparencyLogEntry(entry); + } +} +exports.RekorWitness = RekorWitness; +function toTransparencyLogEntry(entry) { + const logID = Buffer.from(entry.logID, 'hex'); + // Parse entry body so we can extract the kind and version. + const bodyJSON = util_1.encoding.base64Decode(entry.body); + const entryBody = JSON.parse(bodyJSON); + const promise = entry?.verification?.signedEntryTimestamp + ? inclusionPromise(entry.verification.signedEntryTimestamp) + : undefined; + const proof = entry?.verification?.inclusionProof + ? 
inclusionProof(entry.verification.inclusionProof) + : undefined; + const tlogEntry = { + logIndex: entry.logIndex.toString(), + logId: { + keyId: logID, + }, + integratedTime: entry.integratedTime.toString(), + kindVersion: { + kind: entryBody.kind, + version: entryBody.apiVersion, + }, + inclusionPromise: promise, + inclusionProof: proof, + canonicalizedBody: Buffer.from(entry.body, 'base64'), + }; + return { + tlogEntries: [tlogEntry], + }; +} +function inclusionPromise(promise) { + return { + signedEntryTimestamp: Buffer.from(promise, 'base64'), + }; +} +function inclusionProof(proof) { + return { + logIndex: proof.logIndex.toString(), + treeSize: proof.treeSize.toString(), + rootHash: Buffer.from(proof.rootHash, 'hex'), + hashes: proof.hashes.map((h) => Buffer.from(h, 'hex')), + checkpoint: { + envelope: proof.checkpoint, + }, + }; +} diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tsa/client.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tsa/client.js new file mode 100644 index 0000000000000..754de3748dbb3 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tsa/client.js @@ -0,0 +1,46 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TSAClient = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const error_1 = require("../../error"); +const tsa_1 = require("../../external/tsa"); +const util_1 = require("../../util"); +const SHA256_ALGORITHM = 'sha256'; +class TSAClient { + constructor(options) { + this.tsa = new tsa_1.TimestampAuthority({ + baseURL: options.tsaBaseURL, + retry: options.retry, + timeout: options.timeout, + }); + } + async createTimestamp(signature) { + const request = { + artifactHash: util_1.crypto + .digest(SHA256_ALGORITHM, signature) + .toString('base64'), + hashAlgorithm: SHA256_ALGORITHM, + }; + try { + return await this.tsa.createTimestamp(request); + } + catch (err) { + (0, error_1.internalError)(err, 'TSA_CREATE_TIMESTAMP_ERROR', 'error creating timestamp'); + } + } +} +exports.TSAClient = TSAClient; diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tsa/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tsa/index.js new file mode 100644 index 0000000000000..d4f5c7c859d10 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tsa/index.js @@ -0,0 +1,44 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TSAWitness = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +*/ +const client_1 = require("./client"); +class TSAWitness { + constructor(options) { + this.tsa = new client_1.TSAClient({ + tsaBaseURL: options.tsaBaseURL, + retry: options.retry, + timeout: options.timeout, + }); + } + async testify(content) { + const signature = extractSignature(content); + const timestamp = await this.tsa.createTimestamp(signature); + return { + rfc3161Timestamps: [{ signedTimestamp: timestamp }], + }; + } +} +exports.TSAWitness = TSAWitness; +function extractSignature(content) { + switch (content.$case) { + case 'dsseEnvelope': + return content.dsseEnvelope.signatures[0].sig; + case 'messageSignature': + return content.messageSignature.signature; + } +} diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/witness.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/witness.js new file mode 100644 index 0000000000000..c8ad2e549bdc6 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/witness.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/pacote/node_modules/@sigstore/sign/package.json b/node_modules/pacote/node_modules/@sigstore/sign/package.json new file mode 100644 index 0000000000000..fe05e8dc2d73a --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/sign/package.json @@ -0,0 +1,46 @@ +{ + "name": "@sigstore/sign", + "version": "3.0.0", + "description": "Sigstore signing library", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "clean": "shx rm -rf dist *.tsbuildinfo", + "build": "tsc --build", + "test": "jest" + }, + "files": [ + "dist" + ], + "author": "bdehamer@github.com", + "license": "Apache-2.0", + "repository": { + "type": "git", + "url": "git+https://github.com/sigstore/sigstore-js.git" + }, + "bugs": { + "url": "https://github.com/sigstore/sigstore-js/issues" + }, + "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/sign#readme", + "publishConfig": { + "provenance": true + }, + "devDependencies": { + "@sigstore/jest": "^0.0.0", + "@sigstore/mock": "^0.8.0", + "@sigstore/rekor-types": "^3.0.0", + "@types/make-fetch-happen": "^10.0.4", + "@types/promise-retry": "^1.1.6" + }, + "dependencies": { + "@sigstore/bundle": "^3.0.0", + "@sigstore/core": "^2.0.0", + "@sigstore/protobuf-specs": "^0.3.2", + "make-fetch-happen": "^14.0.1", + "proc-log": "^5.0.0", + "promise-retry": "^2.0.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } +} diff --git a/node_modules/pacote/node_modules/@sigstore/tuf/LICENSE b/node_modules/pacote/node_modules/@sigstore/tuf/LICENSE new file mode 100644 index 0000000000000..e9e7c1679a09d --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/tuf/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 The Sigstore Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/node_modules/pacote/node_modules/@sigstore/tuf/dist/appdata.js b/node_modules/pacote/node_modules/@sigstore/tuf/dist/appdata.js new file mode 100644 index 0000000000000..06a8143e70da2 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/tuf/dist/appdata.js @@ -0,0 +1,43 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.appDataPath = appDataPath; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const os_1 = __importDefault(require("os")); +const path_1 = __importDefault(require("path")); +function appDataPath(name) { + const homedir = os_1.default.homedir(); + switch (process.platform) { + /* istanbul ignore next */ + case 'darwin': { + const appSupport = path_1.default.join(homedir, 'Library', 'Application Support'); + return path_1.default.join(appSupport, name); + } + /* istanbul ignore next */ + case 'win32': { + const localAppData = process.env.LOCALAPPDATA || path_1.default.join(homedir, 'AppData', 'Local'); + return path_1.default.join(localAppData, name, 'Data'); + } + /* istanbul ignore next */ + default: { + const localData = process.env.XDG_DATA_HOME || path_1.default.join(homedir, '.local', 'share'); + return path_1.default.join(localData, name); + } + } +} diff --git a/node_modules/pacote/node_modules/@sigstore/tuf/dist/client.js b/node_modules/pacote/node_modules/@sigstore/tuf/dist/client.js new file mode 100644 index 0000000000000..328f49e40dbbd --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/tuf/dist/client.js @@ -0,0 +1,111 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TUFClient = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const fs_1 = __importDefault(require("fs")); +const path_1 = __importDefault(require("path")); +const tuf_js_1 = require("tuf-js"); +const _1 = require("."); +const target_1 = require("./target"); +const TARGETS_DIR_NAME = 'targets'; +class TUFClient { + constructor(options) { + const url = new URL(options.mirrorURL); + const repoName = encodeURIComponent(url.host + url.pathname.replace(/\/$/, '')); + const cachePath = path_1.default.join(options.cachePath, repoName); + initTufCache(cachePath); + seedCache({ + cachePath, + mirrorURL: options.mirrorURL, + tufRootPath: options.rootPath, + forceInit: options.forceInit, + }); + this.updater = initClient({ + mirrorURL: options.mirrorURL, + cachePath, + forceCache: options.forceCache, + retry: options.retry, + timeout: options.timeout, + }); + } + async refresh() { + return this.updater.refresh(); + } + getTarget(targetName) { + return (0, target_1.readTarget)(this.updater, targetName); + } +} +exports.TUFClient = TUFClient; +// Initializes the TUF cache directory structure including the initial +// root.json file. If the cache directory does not exist, it will be +// created. If the targets directory does not exist, it will be created. +// If the root.json file does not exist, it will be copied from the +// rootPath argument. +function initTufCache(cachePath) { + const targetsPath = path_1.default.join(cachePath, TARGETS_DIR_NAME); + if (!fs_1.default.existsSync(cachePath)) { + fs_1.default.mkdirSync(cachePath, { recursive: true }); + } + if (!fs_1.default.existsSync(targetsPath)) { + fs_1.default.mkdirSync(targetsPath); + } +} +// Populates the TUF cache with the initial root.json file. If the root.json +// file does not exist (or we're forcing re-initialization), copy it from either +// the rootPath argument or from one of the repo seeds. +function seedCache({ cachePath, mirrorURL, tufRootPath, forceInit, }) { + const cachedRootPath = path_1.default.join(cachePath, 'root.json'); + // If the root.json file does not exist (or we're forcing re-initialization), + // populate it either from the supplied rootPath or from one of the repo seeds. 
+ if (!fs_1.default.existsSync(cachedRootPath) || forceInit) { + if (tufRootPath) { + fs_1.default.copyFileSync(tufRootPath, cachedRootPath); + } + else { + const seeds = require('../seeds.json'); + const repoSeed = seeds[mirrorURL]; + if (!repoSeed) { + throw new _1.TUFError({ + code: 'TUF_INIT_CACHE_ERROR', + message: `No root.json found for mirror: ${mirrorURL}`, + }); + } + fs_1.default.writeFileSync(cachedRootPath, Buffer.from(repoSeed['root.json'], 'base64')); + // Copy any seed targets into the cache + Object.entries(repoSeed.targets).forEach(([targetName, target]) => { + fs_1.default.writeFileSync(path_1.default.join(cachePath, TARGETS_DIR_NAME, targetName), Buffer.from(target, 'base64')); + }); + } + } +} +function initClient(options) { + const config = { + fetchTimeout: options.timeout, + fetchRetry: options.retry, + }; + return new tuf_js_1.Updater({ + metadataBaseUrl: options.mirrorURL, + targetBaseUrl: `${options.mirrorURL}/targets`, + metadataDir: options.cachePath, + targetDir: path_1.default.join(options.cachePath, TARGETS_DIR_NAME), + forceCache: options.forceCache, + config, + }); +} diff --git a/node_modules/pacote/node_modules/@sigstore/tuf/dist/error.js b/node_modules/pacote/node_modules/@sigstore/tuf/dist/error.js new file mode 100644 index 0000000000000..e13971b289ff2 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/tuf/dist/error.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TUFError = void 0; +class TUFError extends Error { + constructor({ code, message, cause, }) { + super(message); + this.code = code; + this.cause = cause; + this.name = this.constructor.name; + } +} +exports.TUFError = TUFError; diff --git a/node_modules/pacote/node_modules/@sigstore/tuf/dist/index.js b/node_modules/pacote/node_modules/@sigstore/tuf/dist/index.js new file mode 100644 index 0000000000000..2af5de93ec5d2 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/tuf/dist/index.js @@ -0,0 +1,56 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TUFError = exports.DEFAULT_MIRROR_URL = void 0; +exports.getTrustedRoot = getTrustedRoot; +exports.initTUF = initTUF; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const protobuf_specs_1 = require("@sigstore/protobuf-specs"); +const appdata_1 = require("./appdata"); +const client_1 = require("./client"); +exports.DEFAULT_MIRROR_URL = 'https://tuf-repo-cdn.sigstore.dev'; +const DEFAULT_CACHE_DIR = 'sigstore-js'; +const DEFAULT_RETRY = { retries: 2 }; +const DEFAULT_TIMEOUT = 5000; +const TRUSTED_ROOT_TARGET = 'trusted_root.json'; +async function getTrustedRoot( +/* istanbul ignore next */ +options = {}) { + const client = createClient(options); + const trustedRoot = await client.getTarget(TRUSTED_ROOT_TARGET); + return protobuf_specs_1.TrustedRoot.fromJSON(JSON.parse(trustedRoot)); +} +async function initTUF( +/* istanbul ignore next */ +options = {}) { + const client = createClient(options); + return client.refresh().then(() => client); +} +// Create a TUF client with default options +function createClient(options) { + /* istanbul ignore next */ + return new client_1.TUFClient({ + cachePath: options.cachePath || (0, appdata_1.appDataPath)(DEFAULT_CACHE_DIR), + rootPath: options.rootPath, + mirrorURL: options.mirrorURL || exports.DEFAULT_MIRROR_URL, + retry: options.retry ?? DEFAULT_RETRY, + timeout: options.timeout ?? DEFAULT_TIMEOUT, + forceCache: options.forceCache ?? false, + forceInit: options.forceInit ?? options.force ?? false, + }); +} +var error_1 = require("./error"); +Object.defineProperty(exports, "TUFError", { enumerable: true, get: function () { return error_1.TUFError; } }); diff --git a/node_modules/pacote/node_modules/@sigstore/tuf/dist/target.js b/node_modules/pacote/node_modules/@sigstore/tuf/dist/target.js new file mode 100644 index 0000000000000..5c6675bdfbf5f --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/tuf/dist/target.js @@ -0,0 +1,79 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.readTarget = readTarget; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const fs_1 = __importDefault(require("fs")); +const error_1 = require("./error"); +// Downloads and returns the specified target from the provided TUF Updater. +async function readTarget(tuf, targetPath) { + const path = await getTargetPath(tuf, targetPath); + return new Promise((resolve, reject) => { + fs_1.default.readFile(path, 'utf-8', (err, data) => { + if (err) { + reject(new error_1.TUFError({ + code: 'TUF_READ_TARGET_ERROR', + message: `error reading target ${path}`, + cause: err, + })); + } + else { + resolve(data); + } + }); + }); +} +// Returns the local path to the specified target. If the target is not yet +// cached locally, the provided TUF Updater will be used to download and +// cache the target. 
+async function getTargetPath(tuf, target) { + let targetInfo; + try { + targetInfo = await tuf.getTargetInfo(target); + } + catch (err) { + throw new error_1.TUFError({ + code: 'TUF_REFRESH_METADATA_ERROR', + message: 'error refreshing TUF metadata', + cause: err, + }); + } + if (!targetInfo) { + throw new error_1.TUFError({ + code: 'TUF_FIND_TARGET_ERROR', + message: `target ${target} not found`, + }); + } + let path = await tuf.findCachedTarget(targetInfo); + // An empty path here means the target has not been cached locally, or is + // out of date. In either case, we need to download it. + if (!path) { + try { + path = await tuf.downloadTarget(targetInfo); + } + catch (err) { + throw new error_1.TUFError({ + code: 'TUF_DOWNLOAD_TARGET_ERROR', + message: `error downloading target ${path}`, + cause: err, + }); + } + } + return path; +} diff --git a/node_modules/pacote/node_modules/@sigstore/tuf/package.json b/node_modules/pacote/node_modules/@sigstore/tuf/package.json new file mode 100644 index 0000000000000..808689dfddf92 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/tuf/package.json @@ -0,0 +1,41 @@ +{ + "name": "@sigstore/tuf", + "version": "3.0.0", + "description": "Client for the Sigstore TUF repository", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "clean": "shx rm -rf dist *.tsbuildinfo", + "build": "tsc --build", + "test": "jest" + }, + "files": [ + "dist", + "seeds.json" + ], + "author": "bdehamer@github.com", + "license": "Apache-2.0", + "repository": { + "type": "git", + "url": "git+https://github.com/sigstore/sigstore-js.git" + }, + "bugs": { + "url": "https://github.com/sigstore/sigstore-js/issues" + }, + "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/tuf#readme", + "publishConfig": { + "provenance": true + }, + "devDependencies": { + "@sigstore/jest": "^0.0.0", + "@tufjs/repo-mock": "^3.0.1", + "@types/make-fetch-happen": "^10.0.4" + }, + "dependencies": { + "@sigstore/protobuf-specs": "^0.3.2", + "tuf-js": "^3.0.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } +} diff --git a/node_modules/pacote/node_modules/@sigstore/tuf/seeds.json b/node_modules/pacote/node_modules/@sigstore/tuf/seeds.json new file mode 100644 index 0000000000000..d1d3c6b5c4604 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/tuf/seeds.json @@ -0,0 +1 @@ +{"https://tuf-repo-cdn.sigstore.dev":{"root.json":"{
 "signatures": [
  {
   "keyid": "6f260089d5923daf20166ca657c543af618346ab971884a99962b01988bbe0c3",
   "sig": "30460221008ab1f6f17d4f9e6d7dcf1c88912b6b53cc10388644ae1f09bc37a082cd06003e022100e145ef4c7b782d4e8107b53437e669d0476892ce999903ae33d14448366996e7"
  },
  {
   "keyid": "e71a54d543835ba86adad9460379c7641fb8726d164ea766801a1c522aba7ea2",
   "sig": "3045022100c768b2f86da99569019c160a081da54ae36c34c0a3120d3cb69b53b7d113758e02204f671518f617b20d46537fae6c3b63bae8913f4f1962156105cc4f019ac35c6a"
  },
  {
   "keyid": "22f4caec6d8e6f9555af66b3d4c3cb06a3bb23fdc7e39c916c61f462e6f52b06",
   "sig": "3045022100b4434e6995d368d23e74759acd0cb9013c83a5d3511f0f997ec54c456ae4350a022015b0e265d182d2b61dc74e155d98b3c3fbe564ba05286aa14c8df02c9b756516"
  },
  {
   "keyid": "61643838125b440b40db6942f5cb5a31c0dc04368316eb2aaa58b95904a58222",
   "sig": "304502210082c58411d989eb9f861410857d42381590ec9424dbdaa51e78ed13515431904e0220118185da6a6c2947131c17797e2bb7620ce26e5f301d1ceac5f2a7e58f9dcf2e"
  },
  {
   "keyid": "a687e5bf4fab82b0ee58d46e05c9535145a2c9afb458f43d42b45ca0fdce2a70",
   "sig": "3046022100c78513854cae9c32eaa6b88e18912f48006c2757a258f917312caba75948eb9e022100d9e1b4ce0adfe9fd2e2148d7fa27a2f40ba1122bd69da7612d8d1776b013c91d"
  },
  {
   "keyid": "fdfa83a07b5a83589b87ded41f77f39d232ad91f7cce52868dacd06ba089849f",
   "sig": "3045022056483a2d5d9ea9cec6e11eadfb33c484b614298faca15acf1c431b11ed7f734c022100d0c1d726af92a87e4e66459ca5adf38a05b44e1f94318423f954bae8bca5bb2e"
  },
  {
   "keyid": "e2f59acb9488519407e18cbfc9329510be03c04aca9929d2f0301343fec85523",
   "sig": "3046022100d004de88024c32dc5653a9f4843cfc5215427048ad9600d2cf9c969e6edff3d2022100d9ebb798f5fc66af10899dece014a8628ccf3c5402cd4a4270207472f8f6e712"
  },
  {
   "keyid": "3c344aa068fd4cc4e87dc50b612c02431fbc771e95003993683a2b0bf260cf0e",
   "sig": "3046022100b7b09996c45ca2d4b05603e56baefa29718a0b71147cf8c6e66349baa61477df022100c4da80c717b4fa7bba0fd5c72da8a0499358b01358b2309f41d1456ea1e7e1d9"
  },
  {
   "keyid": "ec81669734e017996c5b85f3d02c3de1dd4637a152019fe1af125d2f9368b95e",
   "sig": "3046022100be9782c30744e411a82fa85b5138d601ce148bc19258aec64e7ec24478f38812022100caef63dcaf1a4b9a500d3bd0e3f164ec18f1b63d7a9460d9acab1066db0f016d"
  },
  {
   "keyid": "1e1d65ce98b10addad4764febf7dda2d0436b3d3a3893579c0dddaea20e54849",
   "sig": "30450220746ec3f8534ce55531d0d01ff64964ef440d1e7d2c4c142409b8e9769f1ada6f022100e3b929fcd93ea18feaa0825887a7210489879a66780c07a83f4bd46e2f09ab3b"
  }
 ],
 "signed": {
  "_type": "root",
  "consistent_snapshot": true,
  "expires": "2025-02-19T08:04:32Z",
  "keys": {
   "22f4caec6d8e6f9555af66b3d4c3cb06a3bb23fdc7e39c916c61f462e6f52b06": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEzBzVOmHCPojMVLSI364WiiV8NPrD\n6IgRxVliskz/v+y3JER5mcVGcONliDcWMC5J2lfHmjPNPhb4H7xm8LzfSA==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@santiagotorres"
   },
   "61643838125b440b40db6942f5cb5a31c0dc04368316eb2aaa58b95904a58222": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEinikSsAQmYkNeH5eYq/CnIzLaacO\nxlSaawQDOwqKy/tCqxq5xxPSJc21K4WIhs9GyOkKfzueY3GILzcMJZ4cWw==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@bobcallaway"
   },
   "6f260089d5923daf20166ca657c543af618346ab971884a99962b01988bbe0c3": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEy8XKsmhBYDI8Jc0GwzBxeKax0cm5\nSTKEU65HPFunUn41sT8pi0FjM4IkHz/YUmwmLUO0Wt7lxhj6BkLIK4qYAw==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@dlorenc"
   },
   "7247f0dbad85b147e1863bade761243cc785dcb7aa410e7105dd3d2b61a36d2c": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEWRiGr5+j+3J5SsH+Ztr5nE2H2wO7\nBV+nO3s93gLca18qTOzHY1oWyAGDykMSsGTUBSt9D+An0KfKsD2mfSM42Q==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-online-uri": "gcpkms://projects/sigstore-root-signing/locations/global/keyRings/root/cryptoKeys/timestamp"
   },
   "a687e5bf4fab82b0ee58d46e05c9535145a2c9afb458f43d42b45ca0fdce2a70": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE0ghrh92Lw1Yr3idGV5WqCtMDB8Cx\n+D8hdC4w2ZLNIplVRoVGLskYa3gheMyOjiJ8kPi15aQ2//7P+oj7UvJPGw==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@joshuagl"
   },
   "e71a54d543835ba86adad9460379c7641fb8726d164ea766801a1c522aba7ea2": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEEXsz3SZXFb8jMV42j6pJlyjbjR8K\nN3Bwocexq6LMIb5qsWKOQvLN16NUefLc4HswOoumRsVVaajSpQS6fobkRw==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@mnm678"
   }
  },
  "roles": {
   "root": {
    "keyids": [
     "6f260089d5923daf20166ca657c543af618346ab971884a99962b01988bbe0c3",
     "e71a54d543835ba86adad9460379c7641fb8726d164ea766801a1c522aba7ea2",
     "22f4caec6d8e6f9555af66b3d4c3cb06a3bb23fdc7e39c916c61f462e6f52b06",
     "61643838125b440b40db6942f5cb5a31c0dc04368316eb2aaa58b95904a58222",
     "a687e5bf4fab82b0ee58d46e05c9535145a2c9afb458f43d42b45ca0fdce2a70"
    ],
    "threshold": 3
   },
   "snapshot": {
    "keyids": [
     "7247f0dbad85b147e1863bade761243cc785dcb7aa410e7105dd3d2b61a36d2c"
    ],
    "threshold": 1,
    "x-tuf-on-ci-expiry-period": 3650,
    "x-tuf-on-ci-signing-period": 365
   },
   "targets": {
    "keyids": [
     "6f260089d5923daf20166ca657c543af618346ab971884a99962b01988bbe0c3",
     "e71a54d543835ba86adad9460379c7641fb8726d164ea766801a1c522aba7ea2",
     "22f4caec6d8e6f9555af66b3d4c3cb06a3bb23fdc7e39c916c61f462e6f52b06",
     "61643838125b440b40db6942f5cb5a31c0dc04368316eb2aaa58b95904a58222",
     "a687e5bf4fab82b0ee58d46e05c9535145a2c9afb458f43d42b45ca0fdce2a70"
    ],
    "threshold": 3
   },
   "timestamp": {
    "keyids": [
     "7247f0dbad85b147e1863bade761243cc785dcb7aa410e7105dd3d2b61a36d2c"
    ],
    "threshold": 1,
    "x-tuf-on-ci-expiry-period": 7,
    "x-tuf-on-ci-signing-period": 4
   }
  },
  "spec_version": "1.0",
  "version": 10,
  "x-tuf-on-ci-expiry-period": 182,
  "x-tuf-on-ci-signing-period": 31
 }
}","targets":{"trusted_root.json":"{
  "mediaType": "application/vnd.dev.sigstore.trustedroot+json;version=0.1",
  "tlogs": [
    {
      "baseUrl": "https://rekor.sigstore.dev",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE2G2Y+2tabdTV5BcGiBIx0a9fAFwrkBbmLSGtks4L3qX6yYY0zufBnhC8Ur/iy55GhWP/9A/bY2LhC30M9+RYtw==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2021-01-12T11:53:27.000Z"
        }
      },
      "logId": {
        "keyId": "wNI9atQGlz+VWfO6LRygH4QUfY/8W4RFwiT5i5WRgB0="
      }
    }
  ],
  "certificateAuthorities": [
    {
      "subject": {
        "organization": "sigstore.dev",
        "commonName": "sigstore"
      },
      "uri": "https://fulcio.sigstore.dev",
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIIB+DCCAX6gAwIBAgITNVkDZoCiofPDsy7dfm6geLbuhzAKBggqhkjOPQQDAzAqMRUwEwYDVQQKEwxzaWdzdG9yZS5kZXYxETAPBgNVBAMTCHNpZ3N0b3JlMB4XDTIxMDMwNzAzMjAyOVoXDTMxMDIyMzAzMjAyOVowKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTB2MBAGByqGSM49AgEGBSuBBAAiA2IABLSyA7Ii5k+pNO8ZEWY0ylemWDowOkNa3kL+GZE5Z5GWehL9/A9bRNA3RbrsZ5i0JcastaRL7Sp5fp/jD5dxqc/UdTVnlvS16an+2Yfswe/QuLolRUCrcOE2+2iA5+tzd6NmMGQwDgYDVR0PAQH/BAQDAgEGMBIGA1UdEwEB/wQIMAYBAf8CAQEwHQYDVR0OBBYEFMjFHQBBmiQpMlEk6w2uSu1KBtPsMB8GA1UdIwQYMBaAFMjFHQBBmiQpMlEk6w2uSu1KBtPsMAoGCCqGSM49BAMDA2gAMGUCMH8liWJfMui6vXXBhjDgY4MwslmN/TJxVe/83WrFomwmNf056y1X48F9c4m3a3ozXAIxAKjRay5/aj/jsKKGIkmQatjI8uupHr/+CxFvaJWmpYqNkLDGRU+9orzh5hI2RrcuaQ=="
          }
        ]
      },
      "validFor": {
        "start": "2021-03-07T03:20:29.000Z",
        "end": "2022-12-31T23:59:59.999Z"
      }
    },
    {
      "subject": {
        "organization": "sigstore.dev",
        "commonName": "sigstore"
      },
      "uri": "https://fulcio.sigstore.dev",
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIICGjCCAaGgAwIBAgIUALnViVfnU0brJasmRkHrn/UnfaQwCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMjA0MTMyMDA2MTVaFw0zMTEwMDUxMzU2NThaMDcxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEeMBwGA1UEAxMVc2lnc3RvcmUtaW50ZXJtZWRpYXRlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE8RVS/ysH+NOvuDZyPIZtilgUF9NlarYpAd9HP1vBBH1U5CV77LSS7s0ZiH4nE7Hv7ptS6LvvR/STk798LVgMzLlJ4HeIfF3tHSaexLcYpSASr1kS0N/RgBJz/9jWCiXno3sweTAOBgNVHQ8BAf8EBAMCAQYwEwYDVR0lBAwwCgYIKwYBBQUHAwMwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQU39Ppz1YkEZb5qNjpKFWixi4YZD8wHwYDVR0jBBgwFoAUWMAeX5FFpWapesyQoZMi0CrFxfowCgYIKoZIzj0EAwMDZwAwZAIwPCsQK4DYiZYDPIaDi5HFKnfxXx6ASSVmERfsynYBiX2X6SJRnZU84/9DZdnFvvxmAjBOt6QpBlc4J/0DxvkTCqpclvziL6BCCPnjdlIB3Pu3BxsPmygUY7Ii2zbdCdliiow="
          },
          {
            "rawBytes": "MIIB9zCCAXygAwIBAgIUALZNAPFdxHPwjeDloDwyYChAO/4wCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMTEwMDcxMzU2NTlaFw0zMTEwMDUxMzU2NThaMCoxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjERMA8GA1UEAxMIc2lnc3RvcmUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAT7XeFT4rb3PQGwS4IajtLk3/OlnpgangaBclYpsYBr5i+4ynB07ceb3LP0OIOZdxexX69c5iVuyJRQ+Hz05yi+UF3uBWAlHpiS5sh0+H2GHE7SXrk1EC5m1Tr19L9gg92jYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRYwB5fkUWlZql6zJChkyLQKsXF+jAfBgNVHSMEGDAWgBRYwB5fkUWlZql6zJChkyLQKsXF+jAKBggqhkjOPQQDAwNpADBmAjEAj1nHeXZp+13NWBNa+EDsDP8G1WWg1tCMWP/WHPqpaVo0jhsweNFZgSs0eE7wYI4qAjEA2WB9ot98sIkoF3vZYdd3/VtWB5b9TNMea7Ix/stJ5TfcLLeABLE4BNJOsQ4vnBHJ"
          }
        ]
      },
      "validFor": {
        "start": "2022-04-13T20:06:15.000Z"
      }
    }
  ],
  "ctlogs": [
    {
      "baseUrl": "https://ctfe.sigstore.dev/test",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEbfwR+RJudXscgRBRpKX1XFDy3PyudDxz/SfnRi1fT8ekpfBd2O1uoz7jr3Z8nKzxA69EUQ+eFCFI3zeubPWU7w==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2021-03-14T00:00:00.000Z",
          "end": "2022-10-31T23:59:59.999Z"
        }
      },
      "logId": {
        "keyId": "CGCS8ChS/2hF0dFrJ4ScRWcYrBY9wzjSbea8IgY2b3I="
      }
    },
    {
      "baseUrl": "https://ctfe.sigstore.dev/2022",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEiPSlFi0CmFTfEjCUqF9HuCEcYXNKAaYalIJmBZ8yyezPjTqhxrKBpMnaocVtLJBI1eM3uXnQzQGAJdJ4gs9Fyw==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2022-10-20T00:00:00.000Z"
        }
      },
      "logId": {
        "keyId": "3T0wasbHETJjGR4cmWc3AqJKXrjePK3/h4pygC8p7o4="
      }
    }
  ],
  "timestampAuthorities": [
    {
      "subject": {
        "organization": "GitHub, Inc.",
        "commonName": "Internal Services Root"
      },
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIIB3DCCAWKgAwIBAgIUchkNsH36Xa04b1LqIc+qr9DVecMwCgYIKoZIzj0EAwMwMjEVMBMGA1UEChMMR2l0SHViLCBJbmMuMRkwFwYDVQQDExBUU0EgaW50ZXJtZWRpYXRlMB4XDTIzMDQxNDAwMDAwMFoXDTI0MDQxMzAwMDAwMFowMjEVMBMGA1UEChMMR2l0SHViLCBJbmMuMRkwFwYDVQQDExBUU0EgVGltZXN0YW1waW5nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEUD5ZNbSqYMd6r8qpOOEX9ibGnZT9GsuXOhr/f8U9FJugBGExKYp40OULS0erjZW7xV9xV52NnJf5OeDq4e5ZKqNWMFQwDgYDVR0PAQH/BAQDAgeAMBMGA1UdJQQMMAoGCCsGAQUFBwMIMAwGA1UdEwEB/wQCMAAwHwYDVR0jBBgwFoAUaW1RudOgVt0leqY0WKYbuPr47wAwCgYIKoZIzj0EAwMDaAAwZQIwbUH9HvD4ejCZJOWQnqAlkqURllvu9M8+VqLbiRK+zSfZCZwsiljRn8MQQRSkXEE5AjEAg+VxqtojfVfu8DhzzhCx9GKETbJHb19iV72mMKUbDAFmzZ6bQ8b54Zb8tidy5aWe"
          },
          {
            "rawBytes": "MIICEDCCAZWgAwIBAgIUX8ZO5QXP7vN4dMQ5e9sU3nub8OgwCgYIKoZIzj0EAwMwODEVMBMGA1UEChMMR2l0SHViLCBJbmMuMR8wHQYDVQQDExZJbnRlcm5hbCBTZXJ2aWNlcyBSb290MB4XDTIzMDQxNDAwMDAwMFoXDTI4MDQxMjAwMDAwMFowMjEVMBMGA1UEChMMR2l0SHViLCBJbmMuMRkwFwYDVQQDExBUU0EgaW50ZXJtZWRpYXRlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEvMLY/dTVbvIJYANAuszEwJnQE1llftynyMKIMhh48HmqbVr5ygybzsLRLVKbBWOdZ21aeJz+gZiytZetqcyF9WlER5NEMf6JV7ZNojQpxHq4RHGoGSceQv/qvTiZxEDKo2YwZDAOBgNVHQ8BAf8EBAMCAQYwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQUaW1RudOgVt0leqY0WKYbuPr47wAwHwYDVR0jBBgwFoAU9NYYlobnAG4c0/qjxyH/lq/wz+QwCgYIKoZIzj0EAwMDaQAwZgIxAK1B185ygCrIYFlIs3GjswjnwSMG6LY8woLVdakKDZxVa8f8cqMs1DhcxJ0+09w95QIxAO+tBzZk7vjUJ9iJgD4R6ZWTxQWKqNm74jO99o+o9sv4FI/SZTZTFyMn0IJEHdNmyA=="
          },
          {
            "rawBytes": "MIIB9DCCAXqgAwIBAgIUa/JAkdUjK4JUwsqtaiRJGWhqLSowCgYIKoZIzj0EAwMwODEVMBMGA1UEChMMR2l0SHViLCBJbmMuMR8wHQYDVQQDExZJbnRlcm5hbCBTZXJ2aWNlcyBSb290MB4XDTIzMDQxNDAwMDAwMFoXDTMzMDQxMTAwMDAwMFowODEVMBMGA1UEChMMR2l0SHViLCBJbmMuMR8wHQYDVQQDExZJbnRlcm5hbCBTZXJ2aWNlcyBSb290MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEf9jFAXxz4kx68AHRMOkFBhflDcMTvzaXz4x/FCcXjJ/1qEKon/qPIGnaURskDtyNbNDOpeJTDDFqt48iMPrnzpx6IZwqemfUJN4xBEZfza+pYt/iyod+9tZr20RRWSv/o0UwQzAOBgNVHQ8BAf8EBAMCAQYwEgYDVR0TAQH/BAgwBgEB/wIBAjAdBgNVHQ4EFgQU9NYYlobnAG4c0/qjxyH/lq/wz+QwCgYIKoZIzj0EAwMDaAAwZQIxALZLZ8BgRXzKxLMMN9VIlO+e4hrBnNBgF7tz7Hnrowv2NetZErIACKFymBlvWDvtMAIwZO+ki6ssQ1bsZo98O8mEAf2NZ7iiCgDDU0Vwjeco6zyeh0zBTs9/7gV6AHNQ53xD"
          }
        ]
      },
      "validFor": {
        "start": "2023-04-14T00:00:00.000Z"
      }
    }
  ]
}
","registry.npmjs.org%2Fkeys.json":"ewogICAgImtleXMiOiBbCiAgICAgICAgewogICAgICAgICAgICAia2V5SWQiOiAiU0hBMjU2OmpsM2J3c3d1ODBQampva0NnaDBvMnc1YzJVNExoUUFFNTdnajljejFrekEiLAogICAgICAgICAgICAia2V5VXNhZ2UiOiAibnBtOnNpZ25hdHVyZXMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRTFPbGIzek1BRkZ4WEtIaUlrUU81Y0ozWWhsNWk2VVBwK0lodXRlQkpidUhjQTVVb2dLbzBFV3RsV3dXNktTYUtvVE5FWUw3SmxDUWlWbmtoQmt0VWdnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIxOTk5LTAxLTAxVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgIH0KICAgICAgICB9LAogICAgICAgIHsKICAgICAgICAgICAgImtleUlkIjogIlNIQTI1NjpqbDNid3N3dTgwUGpqb2tDZ2gwbzJ3NWMyVTRMaFFBRTU3Z2o5Y3oxa3pBIiwKICAgICAgICAgICAgImtleVVzYWdlIjogIm5wbTphdHRlc3RhdGlvbnMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRTFPbGIzek1BRkZ4WEtIaUlrUU81Y0ozWWhsNWk2VVBwK0lodXRlQkpidUhjQTVVb2dLbzBFV3RsV3dXNktTYUtvVE5FWUw3SmxDUWlWbmtoQmt0VWdnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIyMDIyLTEyLTAxVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgIH0KICAgICAgICB9CiAgICBdCn0K"}}} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/dsse.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/dsse.js new file mode 100644 index 0000000000000..1033fc422aba0 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/dsse.js @@ -0,0 +1,43 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DSSESignatureContent = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const core_1 = require("@sigstore/core"); +class DSSESignatureContent { + constructor(env) { + this.env = env; + } + compareDigest(digest) { + return core_1.crypto.bufferEqual(digest, core_1.crypto.digest('sha256', this.env.payload)); + } + compareSignature(signature) { + return core_1.crypto.bufferEqual(signature, this.signature); + } + verifySignature(key) { + return core_1.crypto.verify(this.preAuthEncoding, key, this.signature); + } + get signature() { + return this.env.signatures.length > 0 + ? 
this.env.signatures[0].sig + : Buffer.from(''); + } + // DSSE Pre-Authentication Encoding + get preAuthEncoding() { + return core_1.dsse.preAuthEncoding(this.env.payloadType, this.env.payload); + } +} +exports.DSSESignatureContent = DSSESignatureContent; diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/index.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/index.js new file mode 100644 index 0000000000000..4287d8032b75f --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/index.js @@ -0,0 +1,57 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toSignedEntity = toSignedEntity; +exports.signatureContent = signatureContent; +const core_1 = require("@sigstore/core"); +const dsse_1 = require("./dsse"); +const message_1 = require("./message"); +function toSignedEntity(bundle, artifact) { + const { tlogEntries, timestampVerificationData } = bundle.verificationMaterial; + const timestamps = []; + for (const entry of tlogEntries) { + timestamps.push({ + $case: 'transparency-log', + tlogEntry: entry, + }); + } + for (const ts of timestampVerificationData?.rfc3161Timestamps ?? []) { + timestamps.push({ + $case: 'timestamp-authority', + timestamp: core_1.RFC3161Timestamp.parse(ts.signedTimestamp), + }); + } + return { + signature: signatureContent(bundle, artifact), + key: key(bundle), + tlogEntries, + timestamps, + }; +} +function signatureContent(bundle, artifact) { + switch (bundle.content.$case) { + case 'dsseEnvelope': + return new dsse_1.DSSESignatureContent(bundle.content.dsseEnvelope); + case 'messageSignature': + return new message_1.MessageSignatureContent(bundle.content.messageSignature, artifact); + } +} +function key(bundle) { + switch (bundle.verificationMaterial.content.$case) { + case 'publicKey': + return { + $case: 'public-key', + hint: bundle.verificationMaterial.content.publicKey.hint, + }; + case 'x509CertificateChain': + return { + $case: 'certificate', + certificate: core_1.X509Certificate.parse(bundle.verificationMaterial.content.x509CertificateChain + .certificates[0].rawBytes), + }; + case 'certificate': + return { + $case: 'certificate', + certificate: core_1.X509Certificate.parse(bundle.verificationMaterial.content.certificate.rawBytes), + }; + } +} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/message.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/message.js new file mode 100644 index 0000000000000..836148c68a8b6 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/message.js @@ -0,0 +1,36 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.MessageSignatureContent = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const core_1 = require("@sigstore/core"); +class MessageSignatureContent { + constructor(messageSignature, artifact) { + this.signature = messageSignature.signature; + this.messageDigest = messageSignature.messageDigest.digest; + this.artifact = artifact; + } + compareSignature(signature) { + return core_1.crypto.bufferEqual(signature, this.signature); + } + compareDigest(digest) { + return core_1.crypto.bufferEqual(digest, this.messageDigest); + } + verifySignature(key) { + return core_1.crypto.verify(this.artifact, key, this.signature); + } +} +exports.MessageSignatureContent = MessageSignatureContent; diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/error.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/error.js new file mode 100644 index 0000000000000..6cb1cd4121343 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/error.js @@ -0,0 +1,32 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.PolicyError = exports.VerificationError = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +class BaseError extends Error { + constructor({ code, message, cause, }) { + super(message); + this.code = code; + this.cause = cause; + this.name = this.constructor.name; + } +} +class VerificationError extends BaseError { +} +exports.VerificationError = VerificationError; +class PolicyError extends BaseError { +} +exports.PolicyError = PolicyError; diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/index.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/index.js new file mode 100644 index 0000000000000..3222876fcd68b --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/index.js @@ -0,0 +1,28 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Verifier = exports.toTrustMaterial = exports.VerificationError = exports.PolicyError = exports.toSignedEntity = void 0; +/* istanbul ignore file */ +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +var bundle_1 = require("./bundle"); +Object.defineProperty(exports, "toSignedEntity", { enumerable: true, get: function () { return bundle_1.toSignedEntity; } }); +var error_1 = require("./error"); +Object.defineProperty(exports, "PolicyError", { enumerable: true, get: function () { return error_1.PolicyError; } }); +Object.defineProperty(exports, "VerificationError", { enumerable: true, get: function () { return error_1.VerificationError; } }); +var trust_1 = require("./trust"); +Object.defineProperty(exports, "toTrustMaterial", { enumerable: true, get: function () { return trust_1.toTrustMaterial; } }); +var verifier_1 = require("./verifier"); +Object.defineProperty(exports, "Verifier", { enumerable: true, get: function () { return verifier_1.Verifier; } }); diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/key/certificate.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/key/certificate.js new file mode 100644 index 0000000000000..a916de0e51e71 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/key/certificate.js @@ -0,0 +1,205 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.CertificateChainVerifier = void 0; +exports.verifyCertificateChain = verifyCertificateChain; +const error_1 = require("../error"); +const trust_1 = require("../trust"); +function verifyCertificateChain(leaf, certificateAuthorities) { + // Filter list of trusted CAs to those which are valid for the given + // leaf certificate. + const cas = (0, trust_1.filterCertAuthorities)(certificateAuthorities, { + start: leaf.notBefore, + end: leaf.notAfter, + }); + /* eslint-disable-next-line @typescript-eslint/no-explicit-any */ + let error; + for (const ca of cas) { + try { + const verifier = new CertificateChainVerifier({ + trustedCerts: ca.certChain, + untrustedCert: leaf, + }); + return verifier.verify(); + } + catch (err) { + error = err; + } + } + // If we failed to verify the certificate chain for all of the trusted + // CAs, throw the last error we encountered. + throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: 'Failed to verify certificate chain', + cause: error, + }); +} +class CertificateChainVerifier { + constructor(opts) { + this.untrustedCert = opts.untrustedCert; + this.trustedCerts = opts.trustedCerts; + this.localCerts = dedupeCertificates([ + ...opts.trustedCerts, + opts.untrustedCert, + ]); + } + verify() { + // Construct certificate path from leaf to root + const certificatePath = this.sort(); + // Perform validation checks on each certificate in the path + this.checkPath(certificatePath); + // Return verified certificate path + return certificatePath; + } + sort() { + const leafCert = this.untrustedCert; + // Construct all possible paths from the leaf + let paths = this.buildPaths(leafCert); + // Filter for paths which contain a trusted certificate + paths = paths.filter((path) => path.some((cert) => this.trustedCerts.includes(cert))); + if (paths.length === 0) { + throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: 'no trusted certificate path found', + }); + } + // Find the shortest of possible paths + /* istanbul ignore next */ + const path = paths.reduce((prev, curr) => prev.length < curr.length ? prev : curr); + // Construct chain from shortest path + // Removes the last certificate in the path, which will be a second copy + // of the root certificate given that the root is self-signed. 
+ return [leafCert, ...path].slice(0, -1); + } + // Recursively build all possible paths from the leaf to the root + buildPaths(certificate) { + const paths = []; + const issuers = this.findIssuer(certificate); + if (issuers.length === 0) { + throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: 'no valid certificate path found', + }); + } + for (let i = 0; i < issuers.length; i++) { + const issuer = issuers[i]; + // Base case - issuer is self + if (issuer.equals(certificate)) { + paths.push([certificate]); + continue; + } + // Recursively build path for the issuer + const subPaths = this.buildPaths(issuer); + // Construct paths by appending the issuer to each subpath + for (let j = 0; j < subPaths.length; j++) { + paths.push([issuer, ...subPaths[j]]); + } + } + return paths; + } + // Return all possible issuers for the given certificate + findIssuer(certificate) { + let issuers = []; + let keyIdentifier; + // Exit early if the certificate is self-signed + if (certificate.subject.equals(certificate.issuer)) { + if (certificate.verify()) { + return [certificate]; + } + } + // If the certificate has an authority key identifier, use that + // to find the issuer + if (certificate.extAuthorityKeyID) { + keyIdentifier = certificate.extAuthorityKeyID.keyIdentifier; + // TODO: Add support for authorityCertIssuer/authorityCertSerialNumber + // though Fulcio doesn't appear to use these + } + // Find possible issuers by comparing the authorityKeyID/subjectKeyID + // or issuer/subject. Potential issuers are added to the result array. + this.localCerts.forEach((possibleIssuer) => { + if (keyIdentifier) { + if (possibleIssuer.extSubjectKeyID) { + if (possibleIssuer.extSubjectKeyID.keyIdentifier.equals(keyIdentifier)) { + issuers.push(possibleIssuer); + } + return; + } + } + // Fallback to comparing certificate issuer and subject if + // subjectKey/authorityKey extensions are not present + if (possibleIssuer.subject.equals(certificate.issuer)) { + issuers.push(possibleIssuer); + } + }); + // Remove any issuers which fail to verify the certificate + issuers = issuers.filter((issuer) => { + try { + return certificate.verify(issuer); + } + catch (ex) { + /* istanbul ignore next - should never error */ + return false; + } + }); + return issuers; + } + checkPath(path) { + /* istanbul ignore if */ + if (path.length < 1) { + throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: 'certificate chain must contain at least one certificate', + }); + } + // Ensure that all certificates beyond the leaf are CAs + const validCAs = path.slice(1).every((cert) => cert.isCA); + if (!validCAs) { + throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: 'intermediate certificate is not a CA', + }); + } + // Certificate's issuer must match the subject of the next certificate + // in the chain + for (let i = path.length - 2; i >= 0; i--) { + /* istanbul ignore if */ + if (!path[i].issuer.equals(path[i + 1].subject)) { + throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: 'incorrect certificate name chaining', + }); + } + } + // Check pathlength constraints + for (let i = 0; i < path.length; i++) { + const cert = path[i]; + // If the certificate is a CA, check the path length + if (cert.extBasicConstraints?.isCA) { + const pathLength = cert.extBasicConstraints.pathLenConstraint; + // The path length, if set, indicates how many intermediate + // certificates (NOT including the leaf) are allowed to follow. 
The + // pathLength constraint of any intermediate CA certificate MUST be + // greater than or equal to it's own depth in the chain (with an + // adjustment for the leaf certificate) + if (pathLength !== undefined && pathLength < i - 1) { + throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: 'path length constraint exceeded', + }); + } + } + } + } +} +exports.CertificateChainVerifier = CertificateChainVerifier; +// Remove duplicate certificates from the array +function dedupeCertificates(certs) { + for (let i = 0; i < certs.length; i++) { + for (let j = i + 1; j < certs.length; j++) { + if (certs[i].equals(certs[j])) { + certs.splice(j, 1); + j--; + } + } + } + return certs; +} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/key/index.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/key/index.js new file mode 100644 index 0000000000000..cc894aab95a5d --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/key/index.js @@ -0,0 +1,72 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifyPublicKey = verifyPublicKey; +exports.verifyCertificate = verifyCertificate; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const core_1 = require("@sigstore/core"); +const error_1 = require("../error"); +const certificate_1 = require("./certificate"); +const sct_1 = require("./sct"); +const OID_FULCIO_ISSUER_V1 = '1.3.6.1.4.1.57264.1.1'; +const OID_FULCIO_ISSUER_V2 = '1.3.6.1.4.1.57264.1.8'; +function verifyPublicKey(hint, timestamps, trustMaterial) { + const key = trustMaterial.publicKey(hint); + timestamps.forEach((timestamp) => { + if (!key.validFor(timestamp)) { + throw new error_1.VerificationError({ + code: 'PUBLIC_KEY_ERROR', + message: `Public key is not valid for timestamp: ${timestamp.toISOString()}`, + }); + } + }); + return { key: key.publicKey }; +} +function verifyCertificate(leaf, timestamps, trustMaterial) { + // Check that leaf certificate chains to a trusted CA + const path = (0, certificate_1.verifyCertificateChain)(leaf, trustMaterial.certificateAuthorities); + // Check that ALL certificates are valid for ALL of the timestamps + const validForDate = timestamps.every((timestamp) => path.every((cert) => cert.validForDate(timestamp))); + if (!validForDate) { + throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: 'certificate is not valid or expired at the specified date', + }); + } + return { + scts: (0, sct_1.verifySCTs)(path[0], path[1], trustMaterial.ctlogs), + signer: getSigner(path[0]), + }; +} +function getSigner(cert) { + let issuer; + const issuerExtension = cert.extension(OID_FULCIO_ISSUER_V2); + /* istanbul ignore next */ + if (issuerExtension) { + issuer = issuerExtension.valueObj.subs?.[0]?.value.toString('ascii'); + } + else { + issuer = cert.extension(OID_FULCIO_ISSUER_V1)?.value.toString('ascii'); + } + const identity = { + extensions: { issuer }, + subjectAlternativeName: cert.subjectAltName, + }; + return { + 
key: core_1.crypto.createPublicKey(cert.publicKey), + identity, + }; +} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/key/sct.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/key/sct.js new file mode 100644 index 0000000000000..8eca48738096e --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/key/sct.js @@ -0,0 +1,78 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifySCTs = verifySCTs; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const core_1 = require("@sigstore/core"); +const error_1 = require("../error"); +const trust_1 = require("../trust"); +function verifySCTs(cert, issuer, ctlogs) { + let extSCT; + // Verifying the SCT requires that we remove the SCT extension and + // re-encode the TBS structure to DER -- this value is part of the data + // over which the signature is calculated. Since this is a destructive action + // we create a copy of the certificate so we can remove the SCT extension + // without affecting the original certificate. + const clone = cert.clone(); + // Intentionally not using the findExtension method here because we want to + // remove the the SCT extension from the certificate before calculating the + // PreCert structure + for (let i = 0; i < clone.extensions.length; i++) { + const ext = clone.extensions[i]; + if (ext.subs[0].toOID() === core_1.EXTENSION_OID_SCT) { + extSCT = new core_1.X509SCTExtension(ext); + // Remove the extension from the certificate + clone.extensions.splice(i, 1); + break; + } + } + // No SCT extension found to verify + if (!extSCT) { + return []; + } + // Found an SCT extension but it has no SCTs + /* istanbul ignore if -- too difficult to fabricate test case for this */ + if (extSCT.signedCertificateTimestamps.length === 0) { + return []; + } + // Construct the PreCert structure + // https://www.rfc-editor.org/rfc/rfc6962#section-3.2 + const preCert = new core_1.ByteStream(); + // Calculate hash of the issuer's public key + const issuerId = core_1.crypto.digest('sha256', issuer.publicKey); + preCert.appendView(issuerId); + // Re-encodes the certificate to DER after removing the SCT extension + const tbs = clone.tbsCertificate.toDER(); + preCert.appendUint24(tbs.length); + preCert.appendView(tbs); + // Calculate and return the verification results for each SCT + return extSCT.signedCertificateTimestamps.map((sct) => { + // Find the ctlog instance that corresponds to the SCT's logID + const validCTLogs = (0, trust_1.filterTLogAuthorities)(ctlogs, { + logID: sct.logID, + targetDate: sct.datetime, + }); + // See if the SCT is valid for any of the CT logs + const verified = validCTLogs.some((log) => sct.verify(preCert.buffer, log.publicKey)); + if (!verified) { + throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: 'SCT verification failed', + }); + } + return sct.logID; + }); +} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/policy.js 
b/node_modules/pacote/node_modules/@sigstore/verify/dist/policy.js new file mode 100644 index 0000000000000..f5960cf047b84 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/policy.js @@ -0,0 +1,24 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifySubjectAlternativeName = verifySubjectAlternativeName; +exports.verifyExtensions = verifyExtensions; +const error_1 = require("./error"); +function verifySubjectAlternativeName(policyIdentity, signerIdentity) { + if (signerIdentity === undefined || !signerIdentity.match(policyIdentity)) { + throw new error_1.PolicyError({ + code: 'UNTRUSTED_SIGNER_ERROR', + message: `certificate identity error - expected ${policyIdentity}, got ${signerIdentity}`, + }); + } +} +function verifyExtensions(policyExtensions, signerExtensions = {}) { + let key; + for (key in policyExtensions) { + if (signerExtensions[key] !== policyExtensions[key]) { + throw new error_1.PolicyError({ + code: 'UNTRUSTED_SIGNER_ERROR', + message: `invalid certificate extension - expected ${key}=${policyExtensions[key]}, got ${key}=${signerExtensions[key]}`, + }); + } + } +} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/shared.types.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/shared.types.js new file mode 100644 index 0000000000000..c8ad2e549bdc6 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/shared.types.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js new file mode 100644 index 0000000000000..46619b675f886 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js @@ -0,0 +1,157 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifyCheckpoint = verifyCheckpoint; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const core_1 = require("@sigstore/core"); +const error_1 = require("../error"); +const trust_1 = require("../trust"); +// Separator between the note and the signatures in a checkpoint +const CHECKPOINT_SEPARATOR = '\n\n'; +// Checkpoint signatures are of the following form: +// "– \n" +// where: +// - the prefix is an emdash (U+2014). +// - gives a human-readable representation of the signing ID. +// - is the first 4 bytes of the SHA256 hash of the +// associated public key followed by the signature bytes. +const SIGNATURE_REGEX = /\u2014 (\S+) (\S+)\n/g; +// Verifies the checkpoint value in the given tlog entry. There are two steps +// to the verification: +// 1. Verify that all signatures in the checkpoint can be verified against a +// trusted public key +// 2. 
Verify that the root hash in the checkpoint matches the root hash in the +// inclusion proof +// See: https://github.com/transparency-dev/formats/blob/main/log/README.md +function verifyCheckpoint(entry, tlogs) { + // Filter tlog instances to just those which were valid at the time of the + // entry + const validTLogs = (0, trust_1.filterTLogAuthorities)(tlogs, { + targetDate: new Date(Number(entry.integratedTime) * 1000), + }); + const inclusionProof = entry.inclusionProof; + const signedNote = SignedNote.fromString(inclusionProof.checkpoint.envelope); + const checkpoint = LogCheckpoint.fromString(signedNote.note); + // Verify that the signatures in the checkpoint are all valid + if (!verifySignedNote(signedNote, validTLogs)) { + throw new error_1.VerificationError({ + code: 'TLOG_INCLUSION_PROOF_ERROR', + message: 'invalid checkpoint signature', + }); + } + // Verify that the root hash from the checkpoint matches the root hash in the + // inclusion proof + if (!core_1.crypto.bufferEqual(checkpoint.logHash, inclusionProof.rootHash)) { + throw new error_1.VerificationError({ + code: 'TLOG_INCLUSION_PROOF_ERROR', + message: 'root hash mismatch', + }); + } +} +// Verifies the signatures in the SignedNote. For each signature, the +// corresponding transparency log is looked up by the key hint and the +// signature is verified against the public key in the transparency log. +// Throws an error if any of the signatures are invalid. +function verifySignedNote(signedNote, tlogs) { + const data = Buffer.from(signedNote.note, 'utf-8'); + return signedNote.signatures.every((signature) => { + // Find the transparency log instance with the matching key hint + const tlog = tlogs.find((tlog) => core_1.crypto.bufferEqual(tlog.logID.subarray(0, 4), signature.keyHint)); + if (!tlog) { + return false; + } + return core_1.crypto.verify(data, tlog.publicKey, signature.signature); + }); +} +// SignedNote represents a signed note from a transparency log checkpoint. Consists +// of a body (or note) and one more signatures calculated over the body. See +// https://github.com/transparency-dev/formats/blob/main/log/README.md#signed-envelope +class SignedNote { + constructor(note, signatures) { + this.note = note; + this.signatures = signatures; + } + // Deserialize a SignedNote from a string + static fromString(envelope) { + if (!envelope.includes(CHECKPOINT_SEPARATOR)) { + throw new error_1.VerificationError({ + code: 'TLOG_INCLUSION_PROOF_ERROR', + message: 'missing checkpoint separator', + }); + } + // Split the note into the header and the data portions at the separator + const split = envelope.indexOf(CHECKPOINT_SEPARATOR); + const header = envelope.slice(0, split + 1); + const data = envelope.slice(split + CHECKPOINT_SEPARATOR.length); + // Find all the signature lines in the data portion + const matches = data.matchAll(SIGNATURE_REGEX); + // Parse each of the matched signature lines into the name and signature. + // The first four bytes of the signature are the key hint (should match the + // first four bytes of the log ID), and the rest is the signature itself. 
+ const signatures = Array.from(matches, (match) => { + const [, name, signature] = match; + const sigBytes = Buffer.from(signature, 'base64'); + if (sigBytes.length < 5) { + throw new error_1.VerificationError({ + code: 'TLOG_INCLUSION_PROOF_ERROR', + message: 'malformed checkpoint signature', + }); + } + return { + name, + keyHint: sigBytes.subarray(0, 4), + signature: sigBytes.subarray(4), + }; + }); + if (signatures.length === 0) { + throw new error_1.VerificationError({ + code: 'TLOG_INCLUSION_PROOF_ERROR', + message: 'no signatures found in checkpoint', + }); + } + return new SignedNote(header, signatures); + } +} +// LogCheckpoint represents a transparency log checkpoint. Consists of the +// following: +// - origin: the name of the transparency log +// - logSize: the size of the log at the time of the checkpoint +// - logHash: the root hash of the log at the time of the checkpoint +// - rest: the rest of the checkpoint body, which is a list of log entries +// See: +// https://github.com/transparency-dev/formats/blob/main/log/README.md#checkpoint-body +class LogCheckpoint { + constructor(origin, logSize, logHash, rest) { + this.origin = origin; + this.logSize = logSize; + this.logHash = logHash; + this.rest = rest; + } + static fromString(note) { + const lines = note.trimEnd().split('\n'); + if (lines.length < 3) { + throw new error_1.VerificationError({ + code: 'TLOG_INCLUSION_PROOF_ERROR', + message: 'too few lines in checkpoint header', + }); + } + const origin = lines[0]; + const logSize = BigInt(lines[1]); + const rootHash = Buffer.from(lines[2], 'base64'); + const rest = lines.slice(3); + return new LogCheckpoint(origin, logSize, rootHash, rest); + } +} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/index.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/index.js new file mode 100644 index 0000000000000..56e948de19338 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/index.js @@ -0,0 +1,46 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifyTSATimestamp = verifyTSATimestamp; +exports.verifyTLogTimestamp = verifyTLogTimestamp; +const error_1 = require("../error"); +const checkpoint_1 = require("./checkpoint"); +const merkle_1 = require("./merkle"); +const set_1 = require("./set"); +const tsa_1 = require("./tsa"); +function verifyTSATimestamp(timestamp, data, timestampAuthorities) { + (0, tsa_1.verifyRFC3161Timestamp)(timestamp, data, timestampAuthorities); + return { + type: 'timestamp-authority', + logID: timestamp.signerSerialNumber, + timestamp: timestamp.signingTime, + }; +} +function verifyTLogTimestamp(entry, tlogAuthorities) { + let inclusionVerified = false; + if (isTLogEntryWithInclusionPromise(entry)) { + (0, set_1.verifyTLogSET)(entry, tlogAuthorities); + inclusionVerified = true; + } + if (isTLogEntryWithInclusionProof(entry)) { + (0, merkle_1.verifyMerkleInclusion)(entry); + (0, checkpoint_1.verifyCheckpoint)(entry, tlogAuthorities); + inclusionVerified = true; + } + if (!inclusionVerified) { + throw new error_1.VerificationError({ + code: 'TLOG_MISSING_INCLUSION_ERROR', + message: 'inclusion could not be verified', + }); + } + return { + type: 'transparency-log', + logID: entry.logId.keyId, + timestamp: new Date(Number(entry.integratedTime) * 1000), + }; +} +function isTLogEntryWithInclusionPromise(entry) { + return entry.inclusionPromise !== undefined; +} +function isTLogEntryWithInclusionProof(entry) { + return entry.inclusionProof 
!== undefined; +} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/merkle.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/merkle.js new file mode 100644 index 0000000000000..f57cae42002bd --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/merkle.js @@ -0,0 +1,104 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifyMerkleInclusion = verifyMerkleInclusion; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const core_1 = require("@sigstore/core"); +const error_1 = require("../error"); +const RFC6962_LEAF_HASH_PREFIX = Buffer.from([0x00]); +const RFC6962_NODE_HASH_PREFIX = Buffer.from([0x01]); +function verifyMerkleInclusion(entry) { + const inclusionProof = entry.inclusionProof; + const logIndex = BigInt(inclusionProof.logIndex); + const treeSize = BigInt(inclusionProof.treeSize); + if (logIndex < 0n || logIndex >= treeSize) { + throw new error_1.VerificationError({ + code: 'TLOG_INCLUSION_PROOF_ERROR', + message: `invalid index: ${logIndex}`, + }); + } + // Figure out which subset of hashes corresponds to the inner and border + // nodes + const { inner, border } = decompInclProof(logIndex, treeSize); + if (inclusionProof.hashes.length !== inner + border) { + throw new error_1.VerificationError({ + code: 'TLOG_INCLUSION_PROOF_ERROR', + message: 'invalid hash count', + }); + } + const innerHashes = inclusionProof.hashes.slice(0, inner); + const borderHashes = inclusionProof.hashes.slice(inner); + // The entry's hash is the leaf hash + const leafHash = hashLeaf(entry.canonicalizedBody); + // Chain the hashes belonging to the inner and border portions + const calculatedHash = chainBorderRight(chainInner(leafHash, innerHashes, logIndex), borderHashes); + // Calculated hash should match the root hash in the inclusion proof + if (!core_1.crypto.bufferEqual(calculatedHash, inclusionProof.rootHash)) { + throw new error_1.VerificationError({ + code: 'TLOG_INCLUSION_PROOF_ERROR', + message: 'calculated root hash does not match inclusion proof', + }); + } +} +// Breaks down inclusion proof for a leaf at the specified index in a tree of +// the specified size. The split point is where paths to the index leaf and +// the (size - 1) leaf diverge. Returns lengths of the bottom and upper proof +// parts. +function decompInclProof(index, size) { + const inner = innerProofSize(index, size); + const border = onesCount(index >> BigInt(inner)); + return { inner, border }; +} +// Computes a subtree hash for a node on or below the tree's right border. +// Assumes the provided proof hashes are ordered from lower to higher levels +// and seed is the initial hash of the node specified by the index. 
+function chainInner(seed, hashes, index) { + return hashes.reduce((acc, h, i) => { + if ((index >> BigInt(i)) & BigInt(1)) { + return hashChildren(h, acc); + } + else { + return hashChildren(acc, h); + } + }, seed); +} +// Computes a subtree hash for nodes along the tree's right border. +function chainBorderRight(seed, hashes) { + return hashes.reduce((acc, h) => hashChildren(h, acc), seed); +} +function innerProofSize(index, size) { + return bitLength(index ^ (size - BigInt(1))); +} +// Counts the number of ones in the binary representation of the given number. +// https://en.wikipedia.org/wiki/Hamming_weight +function onesCount(num) { + return num.toString(2).split('1').length - 1; +} +// Returns the number of bits necessary to represent an integer in binary. +function bitLength(n) { + if (n === 0n) { + return 0; + } + return n.toString(2).length; +} +// Hashing logic according to RFC6962. +// https://datatracker.ietf.org/doc/html/rfc6962#section-2 +function hashChildren(left, right) { + return core_1.crypto.digest('sha256', RFC6962_NODE_HASH_PREFIX, left, right); +} +function hashLeaf(leaf) { + return core_1.crypto.digest('sha256', RFC6962_LEAF_HASH_PREFIX, leaf); +} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/set.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/set.js new file mode 100644 index 0000000000000..5d3f47bb88746 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/set.js @@ -0,0 +1,60 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifyTLogSET = verifyTLogSET; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const core_1 = require("@sigstore/core"); +const error_1 = require("../error"); +const trust_1 = require("../trust"); +// Verifies the SET for the given entry against the list of trusted +// transparency logs. Returns true if the SET can be verified against at least +// one of the trusted logs; otherwise, returns false. 
+function verifyTLogSET(entry, tlogs) { + // Filter the list of tlog instances to only those which might be able to + // verify the SET + const validTLogs = (0, trust_1.filterTLogAuthorities)(tlogs, { + logID: entry.logId.keyId, + targetDate: new Date(Number(entry.integratedTime) * 1000), + }); + // Check to see if we can verify the SET against any of the valid tlogs + const verified = validTLogs.some((tlog) => { + // Re-create the original Rekor verification payload + const payload = toVerificationPayload(entry); + // Canonicalize the payload and turn into a buffer for verification + const data = Buffer.from(core_1.json.canonicalize(payload), 'utf8'); + // Extract the SET from the tlog entry + const signature = entry.inclusionPromise.signedEntryTimestamp; + return core_1.crypto.verify(data, tlog.publicKey, signature); + }); + if (!verified) { + throw new error_1.VerificationError({ + code: 'TLOG_INCLUSION_PROMISE_ERROR', + message: 'inclusion promise could not be verified', + }); + } +} +// Returns a properly formatted "VerificationPayload" for one of the +// transaction log entires in the given bundle which can be used for SET +// verification. +function toVerificationPayload(entry) { + const { integratedTime, logIndex, logId, canonicalizedBody } = entry; + return { + body: canonicalizedBody.toString('base64'), + integratedTime: Number(integratedTime), + logIndex: Number(logIndex), + logID: logId.keyId.toString('hex'), + }; +} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/tsa.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/tsa.js new file mode 100644 index 0000000000000..70388cd06c52d --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/tsa.js @@ -0,0 +1,73 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifyRFC3161Timestamp = verifyRFC3161Timestamp; +const core_1 = require("@sigstore/core"); +const error_1 = require("../error"); +const certificate_1 = require("../key/certificate"); +const trust_1 = require("../trust"); +function verifyRFC3161Timestamp(timestamp, data, timestampAuthorities) { + const signingTime = timestamp.signingTime; + // Filter for CAs which were valid at the time of signing + timestampAuthorities = (0, trust_1.filterCertAuthorities)(timestampAuthorities, { + start: signingTime, + end: signingTime, + }); + // Filter for CAs which match serial and issuer embedded in the timestamp + timestampAuthorities = filterCAsBySerialAndIssuer(timestampAuthorities, { + serialNumber: timestamp.signerSerialNumber, + issuer: timestamp.signerIssuer, + }); + // Check that we can verify the timestamp with AT LEAST ONE of the remaining + // CAs + const verified = timestampAuthorities.some((ca) => { + try { + verifyTimestampForCA(timestamp, data, ca); + return true; + } + catch (e) { + return false; + } + }); + if (!verified) { + throw new error_1.VerificationError({ + code: 'TIMESTAMP_ERROR', + message: 'timestamp could not be verified', + }); + } +} +function verifyTimestampForCA(timestamp, data, ca) { + const [leaf, ...cas] = ca.certChain; + const signingKey = core_1.crypto.createPublicKey(leaf.publicKey); + const signingTime = timestamp.signingTime; + // Verify the certificate chain for the provided CA + try { + new certificate_1.CertificateChainVerifier({ + untrustedCert: leaf, + trustedCerts: cas, + }).verify(); + } + catch (e) { + throw new error_1.VerificationError({ + code: 'TIMESTAMP_ERROR', + message: 'invalid certificate chain', + }); + } + // 
Check that all of the CA certs were valid at the time of signing + const validAtSigningTime = ca.certChain.every((cert) => cert.validForDate(signingTime)); + if (!validAtSigningTime) { + throw new error_1.VerificationError({ + code: 'TIMESTAMP_ERROR', + message: 'timestamp was signed with an expired certificate', + }); + } + // Check that the signing certificate's key can be used to verify the + // timestamp signature. + timestamp.verify(data, signingKey); +} +// Filters the list of CAs to those which have a leaf signing certificate which +// matches the given serial number and issuer. +function filterCAsBySerialAndIssuer(timestampAuthorities, criteria) { + return timestampAuthorities.filter((ca) => ca.certChain.length > 0 && + core_1.crypto.bufferEqual(ca.certChain[0].serialNumber, criteria.serialNumber) && + core_1.crypto.bufferEqual(ca.certChain[0].issuer, criteria.issuer)); +} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/dsse.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/dsse.js new file mode 100644 index 0000000000000..d71ed8c6e7ad9 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/dsse.js @@ -0,0 +1,57 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifyDSSETLogBody = verifyDSSETLogBody; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const error_1 = require("../error"); +// Compare the given intoto tlog entry to the given bundle +function verifyDSSETLogBody(tlogEntry, content) { + switch (tlogEntry.apiVersion) { + case '0.0.1': + return verifyDSSE001TLogBody(tlogEntry, content); + default: + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: `unsupported dsse version: ${tlogEntry.apiVersion}`, + }); + } +} +// Compare the given dsse v0.0.1 tlog entry to the given DSSE envelope. 
+function verifyDSSE001TLogBody(tlogEntry, content) { + // Ensure the bundle's DSSE only contains a single signature + if (tlogEntry.spec.signatures?.length !== 1) { + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: 'signature count mismatch', + }); + } + const tlogSig = tlogEntry.spec.signatures[0].signature; + // Ensure that the signature in the bundle's DSSE matches tlog entry + if (!content.compareSignature(Buffer.from(tlogSig, 'base64'))) + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: 'tlog entry signature mismatch', + }); + // Ensure the digest of the bundle's DSSE payload matches the digest in the + // tlog entry + const tlogHash = tlogEntry.spec.payloadHash?.value || ''; + if (!content.compareDigest(Buffer.from(tlogHash, 'hex'))) { + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: 'DSSE payload hash mismatch', + }); + } +} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js new file mode 100644 index 0000000000000..c4aa345b57ba7 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js @@ -0,0 +1,51 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifyHashedRekordTLogBody = verifyHashedRekordTLogBody; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const error_1 = require("../error"); +// Compare the given hashedrekord tlog entry to the given bundle +function verifyHashedRekordTLogBody(tlogEntry, content) { + switch (tlogEntry.apiVersion) { + case '0.0.1': + return verifyHashedrekord001TLogBody(tlogEntry, content); + default: + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: `unsupported hashedrekord version: ${tlogEntry.apiVersion}`, + }); + } +} +// Compare the given hashedrekord v0.0.1 tlog entry to the given message +// signature +function verifyHashedrekord001TLogBody(tlogEntry, content) { + // Ensure that the bundles message signature matches the tlog entry + const tlogSig = tlogEntry.spec.signature.content || ''; + if (!content.compareSignature(Buffer.from(tlogSig, 'base64'))) { + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: 'signature mismatch', + }); + } + // Ensure that the bundle's message digest matches the tlog entry + const tlogDigest = tlogEntry.spec.data.hash?.value || ''; + if (!content.compareDigest(Buffer.from(tlogDigest, 'hex'))) { + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: 'digest mismatch', + }); + } +} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/index.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/index.js new file mode 100644 index 0000000000000..da235360c594a --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/index.js @@ -0,0 +1,47 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifyTLogBody = verifyTLogBody; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const error_1 = require("../error"); +const dsse_1 = require("./dsse"); +const hashedrekord_1 = require("./hashedrekord"); +const intoto_1 = require("./intoto"); +// Verifies that the given tlog entry matches the supplied signature content. 
+function verifyTLogBody(entry, sigContent) { + const { kind, version } = entry.kindVersion; + const body = JSON.parse(entry.canonicalizedBody.toString('utf8')); + if (kind !== body.kind || version !== body.apiVersion) { + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: `kind/version mismatch - expected: ${kind}/${version}, received: ${body.kind}/${body.apiVersion}`, + }); + } + switch (body.kind) { + case 'dsse': + return (0, dsse_1.verifyDSSETLogBody)(body, sigContent); + case 'intoto': + return (0, intoto_1.verifyIntotoTLogBody)(body, sigContent); + case 'hashedrekord': + return (0, hashedrekord_1.verifyHashedRekordTLogBody)(body, sigContent); + /* istanbul ignore next */ + default: + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: `unsupported kind: ${kind}`, + }); + } +} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/intoto.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/intoto.js new file mode 100644 index 0000000000000..9096ae9418cc3 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/intoto.js @@ -0,0 +1,62 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifyIntotoTLogBody = verifyIntotoTLogBody; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const error_1 = require("../error"); +// Compare the given intoto tlog entry to the given bundle +function verifyIntotoTLogBody(tlogEntry, content) { + switch (tlogEntry.apiVersion) { + case '0.0.2': + return verifyIntoto002TLogBody(tlogEntry, content); + default: + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: `unsupported intoto version: ${tlogEntry.apiVersion}`, + }); + } +} +// Compare the given intoto v0.0.2 tlog entry to the given DSSE envelope. 
+function verifyIntoto002TLogBody(tlogEntry, content) { + // Ensure the bundle's DSSE contains a single signature + if (tlogEntry.spec.content.envelope.signatures?.length !== 1) { + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: 'signature count mismatch', + }); + } + // Signature is double-base64-encoded in the tlog entry + const tlogSig = base64Decode(tlogEntry.spec.content.envelope.signatures[0].sig); + // Ensure that the signature in the bundle's DSSE matches tlog entry + if (!content.compareSignature(Buffer.from(tlogSig, 'base64'))) { + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: 'tlog entry signature mismatch', + }); + } + // Ensure the digest of the bundle's DSSE payload matches the digest in the + // tlog entry + const tlogHash = tlogEntry.spec.content.payloadHash?.value || ''; + if (!content.compareDigest(Buffer.from(tlogHash, 'hex'))) { + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: 'DSSE payload hash mismatch', + }); + } +} +function base64Decode(str) { + return Buffer.from(str, 'base64').toString('utf-8'); +} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/filter.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/filter.js new file mode 100644 index 0000000000000..880a16cf1940e --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/filter.js @@ -0,0 +1,23 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.filterCertAuthorities = filterCertAuthorities; +exports.filterTLogAuthorities = filterTLogAuthorities; +function filterCertAuthorities(certAuthorities, criteria) { + return certAuthorities.filter((ca) => { + return (ca.validFor.start <= criteria.start && ca.validFor.end >= criteria.end); + }); +} +// Filter the list of tlog instances to only those which match the given log +// ID and have public keys which are valid for the given integrated time. +function filterTLogAuthorities(tlogAuthorities, criteria) { + return tlogAuthorities.filter((tlog) => { + // If we're filtering by log ID and the log IDs don't match, we can't use + // this tlog + if (criteria.logID && !tlog.logID.equals(criteria.logID)) { + return false; + } + // Check that the integrated time is within the validFor range + return (tlog.validFor.start <= criteria.targetDate && + criteria.targetDate <= tlog.validFor.end); + }); +} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/index.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/index.js new file mode 100644 index 0000000000000..bfab2eb4f9975 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/index.js @@ -0,0 +1,86 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.filterTLogAuthorities = exports.filterCertAuthorities = void 0; +exports.toTrustMaterial = toTrustMaterial; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const core_1 = require("@sigstore/core"); +const protobuf_specs_1 = require("@sigstore/protobuf-specs"); +const error_1 = require("../error"); +const BEGINNING_OF_TIME = new Date(0); +const END_OF_TIME = new Date(8640000000000000); +var filter_1 = require("./filter"); +Object.defineProperty(exports, "filterCertAuthorities", { enumerable: true, get: function () { return filter_1.filterCertAuthorities; } }); +Object.defineProperty(exports, "filterTLogAuthorities", { enumerable: true, get: function () { return filter_1.filterTLogAuthorities; } }); +function toTrustMaterial(root, keys) { + const keyFinder = typeof keys === 'function' ? keys : keyLocator(keys); + return { + certificateAuthorities: root.certificateAuthorities.map(createCertAuthority), + timestampAuthorities: root.timestampAuthorities.map(createCertAuthority), + tlogs: root.tlogs.map(createTLogAuthority), + ctlogs: root.ctlogs.map(createTLogAuthority), + publicKey: keyFinder, + }; +} +function createTLogAuthority(tlogInstance) { + const keyDetails = tlogInstance.publicKey.keyDetails; + const keyType = keyDetails === protobuf_specs_1.PublicKeyDetails.PKCS1_RSA_PKCS1V5 || + keyDetails === protobuf_specs_1.PublicKeyDetails.PKIX_RSA_PKCS1V5 || + keyDetails === protobuf_specs_1.PublicKeyDetails.PKIX_RSA_PKCS1V15_2048_SHA256 || + keyDetails === protobuf_specs_1.PublicKeyDetails.PKIX_RSA_PKCS1V15_3072_SHA256 || + keyDetails === protobuf_specs_1.PublicKeyDetails.PKIX_RSA_PKCS1V15_4096_SHA256 + ? 'pkcs1' + : 'spki'; + return { + logID: tlogInstance.logId.keyId, + publicKey: core_1.crypto.createPublicKey(tlogInstance.publicKey.rawBytes, keyType), + validFor: { + start: tlogInstance.publicKey.validFor?.start || BEGINNING_OF_TIME, + end: tlogInstance.publicKey.validFor?.end || END_OF_TIME, + }, + }; +} +function createCertAuthority(ca) { + /* istanbul ignore next */ + return { + certChain: ca.certChain.certificates.map((cert) => { + return core_1.X509Certificate.parse(cert.rawBytes); + }), + validFor: { + start: ca.validFor?.start || BEGINNING_OF_TIME, + end: ca.validFor?.end || END_OF_TIME, + }, + }; +} +function keyLocator(keys) { + return (hint) => { + const key = (keys || {})[hint]; + if (!key) { + throw new error_1.VerificationError({ + code: 'PUBLIC_KEY_ERROR', + message: `key not found: ${hint}`, + }); + } + return { + publicKey: core_1.crypto.createPublicKey(key.rawBytes), + validFor: (date) => { + /* istanbul ignore next */ + return ((key.validFor?.start || BEGINNING_OF_TIME) <= date && + (key.validFor?.end || END_OF_TIME) >= date); + }, + }; + }; +} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/trust.types.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/trust.types.js new file mode 100644 index 0000000000000..c8ad2e549bdc6 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/trust.types.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/verifier.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/verifier.js new file mode 100644 index 0000000000000..829727cd1d40a --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/verifier.js @@ -0,0 +1,141 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Verifier = void 0; +/* +Copyright 2023 The Sigstore Authors. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const util_1 = require("util"); +const error_1 = require("./error"); +const key_1 = require("./key"); +const policy_1 = require("./policy"); +const timestamp_1 = require("./timestamp"); +const tlog_1 = require("./tlog"); +class Verifier { + constructor(trustMaterial, options = {}) { + this.trustMaterial = trustMaterial; + this.options = { + ctlogThreshold: options.ctlogThreshold ?? 1, + tlogThreshold: options.tlogThreshold ?? 1, + tsaThreshold: options.tsaThreshold ?? 0, + }; + } + verify(entity, policy) { + const timestamps = this.verifyTimestamps(entity); + const signer = this.verifySigningKey(entity, timestamps); + this.verifyTLogs(entity); + this.verifySignature(entity, signer); + if (policy) { + this.verifyPolicy(policy, signer.identity || {}); + } + return signer; + } + // Checks that all of the timestamps in the entity are valid and returns them + verifyTimestamps(entity) { + let tlogCount = 0; + let tsaCount = 0; + const timestamps = entity.timestamps.map((timestamp) => { + switch (timestamp.$case) { + case 'timestamp-authority': + tsaCount++; + return (0, timestamp_1.verifyTSATimestamp)(timestamp.timestamp, entity.signature.signature, this.trustMaterial.timestampAuthorities); + case 'transparency-log': + tlogCount++; + return (0, timestamp_1.verifyTLogTimestamp)(timestamp.tlogEntry, this.trustMaterial.tlogs); + } + }); + // Check for duplicate timestamps + if (containsDupes(timestamps)) { + throw new error_1.VerificationError({ + code: 'TIMESTAMP_ERROR', + message: 'duplicate timestamp', + }); + } + if (tlogCount < this.options.tlogThreshold) { + throw new error_1.VerificationError({ + code: 'TIMESTAMP_ERROR', + message: `expected ${this.options.tlogThreshold} tlog timestamps, got ${tlogCount}`, + }); + } + if (tsaCount < this.options.tsaThreshold) { + throw new error_1.VerificationError({ + code: 'TIMESTAMP_ERROR', + message: `expected ${this.options.tsaThreshold} tsa timestamps, got ${tsaCount}`, + }); + } + return timestamps.map((t) => t.timestamp); + } + // Checks that the signing key is valid for all of the the supplied timestamps + // and returns the signer. 
+ verifySigningKey({ key }, timestamps) { + switch (key.$case) { + case 'public-key': { + return (0, key_1.verifyPublicKey)(key.hint, timestamps, this.trustMaterial); + } + case 'certificate': { + const result = (0, key_1.verifyCertificate)(key.certificate, timestamps, this.trustMaterial); + /* istanbul ignore next - no fixture */ + if (containsDupes(result.scts)) { + throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: 'duplicate SCT', + }); + } + if (result.scts.length < this.options.ctlogThreshold) { + throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: `expected ${this.options.ctlogThreshold} SCTs, got ${result.scts.length}`, + }); + } + return result.signer; + } + } + } + // Checks that the tlog entries are valid for the supplied content + verifyTLogs({ signature: content, tlogEntries }) { + tlogEntries.forEach((entry) => (0, tlog_1.verifyTLogBody)(entry, content)); + } + // Checks that the signature is valid for the supplied content + verifySignature(entity, signer) { + if (!entity.signature.verifySignature(signer.key)) { + throw new error_1.VerificationError({ + code: 'SIGNATURE_ERROR', + message: 'signature verification failed', + }); + } + } + verifyPolicy(policy, identity) { + // Check the subject alternative name of the signer matches the policy + if (policy.subjectAlternativeName) { + (0, policy_1.verifySubjectAlternativeName)(policy.subjectAlternativeName, identity.subjectAlternativeName); + } + // Check that the extensions of the signer match the policy + if (policy.extensions) { + (0, policy_1.verifyExtensions)(policy.extensions, identity.extensions); + } + } +} +exports.Verifier = Verifier; +// Checks for duplicate items in the array. Objects are compared using +// deep equality. +function containsDupes(arr) { + for (let i = 0; i < arr.length; i++) { + for (let j = i + 1; j < arr.length; j++) { + if ((0, util_1.isDeepStrictEqual)(arr[i], arr[j])) { + return true; + } + } + } + return false; +} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/package.json b/node_modules/pacote/node_modules/@sigstore/verify/package.json new file mode 100644 index 0000000000000..edf72b8bfd968 --- /dev/null +++ b/node_modules/pacote/node_modules/@sigstore/verify/package.json @@ -0,0 +1,36 @@ +{ + "name": "@sigstore/verify", + "version": "2.0.0", + "description": "Verification of Sigstore signatures", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "clean": "shx rm -rf dist *.tsbuildinfo", + "build": "tsc --build", + "test": "jest" + }, + "files": [ + "dist" + ], + "author": "bdehamer@github.com", + "license": "Apache-2.0", + "repository": { + "type": "git", + "url": "git+https://github.com/sigstore/sigstore-js.git" + }, + "bugs": { + "url": "https://github.com/sigstore/sigstore-js/issues" + }, + "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/verify#readme", + "publishConfig": { + "provenance": true + }, + "dependencies": { + "@sigstore/protobuf-specs": "^0.3.2", + "@sigstore/bundle": "^3.0.0", + "@sigstore/core": "^2.0.0" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } +} diff --git a/node_modules/pacote/node_modules/@tufjs/models/LICENSE b/node_modules/pacote/node_modules/@tufjs/models/LICENSE new file mode 100644 index 0000000000000..420700f5d3765 --- /dev/null +++ b/node_modules/pacote/node_modules/@tufjs/models/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 GitHub and the TUF Contributors + +Permission is hereby granted, free of charge, to any person 
obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/base.js b/node_modules/pacote/node_modules/@tufjs/models/dist/base.js new file mode 100644 index 0000000000000..85e45d8fc1151 --- /dev/null +++ b/node_modules/pacote/node_modules/@tufjs/models/dist/base.js @@ -0,0 +1,92 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Signed = exports.MetadataKind = void 0; +exports.isMetadataKind = isMetadataKind; +const util_1 = __importDefault(require("util")); +const error_1 = require("./error"); +const utils_1 = require("./utils"); +const SPECIFICATION_VERSION = ['1', '0', '31']; +var MetadataKind; +(function (MetadataKind) { + MetadataKind["Root"] = "root"; + MetadataKind["Timestamp"] = "timestamp"; + MetadataKind["Snapshot"] = "snapshot"; + MetadataKind["Targets"] = "targets"; +})(MetadataKind || (exports.MetadataKind = MetadataKind = {})); +function isMetadataKind(value) { + return (typeof value === 'string' && + Object.values(MetadataKind).includes(value)); +} +/*** + * A base class for the signed part of TUF metadata. + * + * Objects with base class Signed are usually included in a ``Metadata`` object + * on the signed attribute. This class provides attributes and methods that + * are common for all TUF metadata types (roles). 
+ */ +class Signed { + constructor(options) { + this.specVersion = options.specVersion || SPECIFICATION_VERSION.join('.'); + const specList = this.specVersion.split('.'); + if (!(specList.length === 2 || specList.length === 3) || + !specList.every((item) => isNumeric(item))) { + throw new error_1.ValueError('Failed to parse specVersion'); + } + // major version must match + if (specList[0] != SPECIFICATION_VERSION[0]) { + throw new error_1.ValueError('Unsupported specVersion'); + } + this.expires = options.expires; + this.version = options.version; + this.unrecognizedFields = options.unrecognizedFields || {}; + } + equals(other) { + if (!(other instanceof Signed)) { + return false; + } + return (this.specVersion === other.specVersion && + this.expires === other.expires && + this.version === other.version && + util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); + } + isExpired(referenceTime) { + if (!referenceTime) { + referenceTime = new Date(); + } + return referenceTime >= new Date(this.expires); + } + static commonFieldsFromJSON(data) { + const { spec_version, expires, version, ...rest } = data; + if (!utils_1.guard.isDefined(spec_version)) { + throw new error_1.ValueError('spec_version is not defined'); + } + else if (typeof spec_version !== 'string') { + throw new TypeError('spec_version must be a string'); + } + if (!utils_1.guard.isDefined(expires)) { + throw new error_1.ValueError('expires is not defined'); + } + else if (!(typeof expires === 'string')) { + throw new TypeError('expires must be a string'); + } + if (!utils_1.guard.isDefined(version)) { + throw new error_1.ValueError('version is not defined'); + } + else if (!(typeof version === 'number')) { + throw new TypeError('version must be a number'); + } + return { + specVersion: spec_version, + expires, + version, + unrecognizedFields: rest, + }; + } +} +exports.Signed = Signed; +function isNumeric(str) { + return !isNaN(Number(str)); +} diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/delegations.js b/node_modules/pacote/node_modules/@tufjs/models/dist/delegations.js new file mode 100644 index 0000000000000..7165f1e244393 --- /dev/null +++ b/node_modules/pacote/node_modules/@tufjs/models/dist/delegations.js @@ -0,0 +1,115 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Delegations = void 0; +const util_1 = __importDefault(require("util")); +const error_1 = require("./error"); +const key_1 = require("./key"); +const role_1 = require("./role"); +const utils_1 = require("./utils"); +/** + * A container object storing information about all delegations. + * + * Targets roles that are trusted to provide signed metadata files + * describing targets with designated pathnames and/or further delegations. 
+ */ +class Delegations { + constructor(options) { + this.keys = options.keys; + this.unrecognizedFields = options.unrecognizedFields || {}; + if (options.roles) { + if (Object.keys(options.roles).some((roleName) => role_1.TOP_LEVEL_ROLE_NAMES.includes(roleName))) { + throw new error_1.ValueError('Delegated role name conflicts with top-level role name'); + } + } + this.succinctRoles = options.succinctRoles; + this.roles = options.roles; + } + equals(other) { + if (!(other instanceof Delegations)) { + return false; + } + return (util_1.default.isDeepStrictEqual(this.keys, other.keys) && + util_1.default.isDeepStrictEqual(this.roles, other.roles) && + util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields) && + util_1.default.isDeepStrictEqual(this.succinctRoles, other.succinctRoles)); + } + *rolesForTarget(targetPath) { + if (this.roles) { + for (const role of Object.values(this.roles)) { + if (role.isDelegatedPath(targetPath)) { + yield { role: role.name, terminating: role.terminating }; + } + } + } + else if (this.succinctRoles) { + yield { + role: this.succinctRoles.getRoleForTarget(targetPath), + terminating: true, + }; + } + } + toJSON() { + const json = { + keys: keysToJSON(this.keys), + ...this.unrecognizedFields, + }; + if (this.roles) { + json.roles = rolesToJSON(this.roles); + } + else if (this.succinctRoles) { + json.succinct_roles = this.succinctRoles.toJSON(); + } + return json; + } + static fromJSON(data) { + const { keys, roles, succinct_roles, ...unrecognizedFields } = data; + let succinctRoles; + if (utils_1.guard.isObject(succinct_roles)) { + succinctRoles = role_1.SuccinctRoles.fromJSON(succinct_roles); + } + return new Delegations({ + keys: keysFromJSON(keys), + roles: rolesFromJSON(roles), + unrecognizedFields, + succinctRoles, + }); + } +} +exports.Delegations = Delegations; +function keysToJSON(keys) { + return Object.entries(keys).reduce((acc, [keyId, key]) => ({ + ...acc, + [keyId]: key.toJSON(), + }), {}); +} +function rolesToJSON(roles) { + return Object.values(roles).map((role) => role.toJSON()); +} +function keysFromJSON(data) { + if (!utils_1.guard.isObjectRecord(data)) { + throw new TypeError('keys is malformed'); + } + return Object.entries(data).reduce((acc, [keyID, keyData]) => ({ + ...acc, + [keyID]: key_1.Key.fromJSON(keyID, keyData), + }), {}); +} +function rolesFromJSON(data) { + let roleMap; + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObjectArray(data)) { + throw new TypeError('roles is malformed'); + } + roleMap = data.reduce((acc, role) => { + const delegatedRole = role_1.DelegatedRole.fromJSON(role); + return { + ...acc, + [delegatedRole.name]: delegatedRole, + }; + }, {}); + } + return roleMap; +} diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/error.js b/node_modules/pacote/node_modules/@tufjs/models/dist/error.js new file mode 100644 index 0000000000000..ba80698747ba0 --- /dev/null +++ b/node_modules/pacote/node_modules/@tufjs/models/dist/error.js @@ -0,0 +1,27 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.UnsupportedAlgorithmError = exports.CryptoError = exports.LengthOrHashMismatchError = exports.UnsignedMetadataError = exports.RepositoryError = exports.ValueError = void 0; +// An error about insufficient values +class ValueError extends Error { +} +exports.ValueError = ValueError; +// An error with a repository's state, such as a missing file. 
+// It covers all exceptions that come from the repository side when +// looking from the perspective of users of metadata API or ngclient. +class RepositoryError extends Error { +} +exports.RepositoryError = RepositoryError; +// An error about metadata object with insufficient threshold of signatures. +class UnsignedMetadataError extends RepositoryError { +} +exports.UnsignedMetadataError = UnsignedMetadataError; +// An error while checking the length and hash values of an object. +class LengthOrHashMismatchError extends RepositoryError { +} +exports.LengthOrHashMismatchError = LengthOrHashMismatchError; +class CryptoError extends Error { +} +exports.CryptoError = CryptoError; +class UnsupportedAlgorithmError extends CryptoError { +} +exports.UnsupportedAlgorithmError = UnsupportedAlgorithmError; diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/file.js b/node_modules/pacote/node_modules/@tufjs/models/dist/file.js new file mode 100644 index 0000000000000..b35fe5950bbb7 --- /dev/null +++ b/node_modules/pacote/node_modules/@tufjs/models/dist/file.js @@ -0,0 +1,183 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TargetFile = exports.MetaFile = void 0; +const crypto_1 = __importDefault(require("crypto")); +const util_1 = __importDefault(require("util")); +const error_1 = require("./error"); +const utils_1 = require("./utils"); +// A container with information about a particular metadata file. +// +// This class is used for Timestamp and Snapshot metadata. +class MetaFile { + constructor(opts) { + if (opts.version <= 0) { + throw new error_1.ValueError('Metafile version must be at least 1'); + } + if (opts.length !== undefined) { + validateLength(opts.length); + } + this.version = opts.version; + this.length = opts.length; + this.hashes = opts.hashes; + this.unrecognizedFields = opts.unrecognizedFields || {}; + } + equals(other) { + if (!(other instanceof MetaFile)) { + return false; + } + return (this.version === other.version && + this.length === other.length && + util_1.default.isDeepStrictEqual(this.hashes, other.hashes) && + util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); + } + verify(data) { + // Verifies that the given data matches the expected length. + if (this.length !== undefined) { + if (data.length !== this.length) { + throw new error_1.LengthOrHashMismatchError(`Expected length ${this.length} but got ${data.length}`); + } + } + // Verifies that the given data matches the supplied hashes. 
+ if (this.hashes) { + Object.entries(this.hashes).forEach(([key, value]) => { + let hash; + try { + hash = crypto_1.default.createHash(key); + } + catch (e) { + throw new error_1.LengthOrHashMismatchError(`Hash algorithm ${key} not supported`); + } + const observedHash = hash.update(data).digest('hex'); + if (observedHash !== value) { + throw new error_1.LengthOrHashMismatchError(`Expected hash ${value} but got ${observedHash}`); + } + }); + } + } + toJSON() { + const json = { + version: this.version, + ...this.unrecognizedFields, + }; + if (this.length !== undefined) { + json.length = this.length; + } + if (this.hashes) { + json.hashes = this.hashes; + } + return json; + } + static fromJSON(data) { + const { version, length, hashes, ...rest } = data; + if (typeof version !== 'number') { + throw new TypeError('version must be a number'); + } + if (utils_1.guard.isDefined(length) && typeof length !== 'number') { + throw new TypeError('length must be a number'); + } + if (utils_1.guard.isDefined(hashes) && !utils_1.guard.isStringRecord(hashes)) { + throw new TypeError('hashes must be string keys and values'); + } + return new MetaFile({ + version, + length, + hashes, + unrecognizedFields: rest, + }); + } +} +exports.MetaFile = MetaFile; +// Container for info about a particular target file. +// +// This class is used for Target metadata. +class TargetFile { + constructor(opts) { + validateLength(opts.length); + this.length = opts.length; + this.path = opts.path; + this.hashes = opts.hashes; + this.unrecognizedFields = opts.unrecognizedFields || {}; + } + get custom() { + const custom = this.unrecognizedFields['custom']; + if (!custom || Array.isArray(custom) || !(typeof custom === 'object')) { + return {}; + } + return custom; + } + equals(other) { + if (!(other instanceof TargetFile)) { + return false; + } + return (this.length === other.length && + this.path === other.path && + util_1.default.isDeepStrictEqual(this.hashes, other.hashes) && + util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); + } + async verify(stream) { + let observedLength = 0; + // Create a digest for each hash algorithm + const digests = Object.keys(this.hashes).reduce((acc, key) => { + try { + acc[key] = crypto_1.default.createHash(key); + } + catch (e) { + throw new error_1.LengthOrHashMismatchError(`Hash algorithm ${key} not supported`); + } + return acc; + }, {}); + // Read stream chunk by chunk + for await (const chunk of stream) { + // Keep running tally of stream length + observedLength += chunk.length; + // Append chunk to each digest + Object.values(digests).forEach((digest) => { + digest.update(chunk); + }); + } + // Verify length matches expected value + if (observedLength !== this.length) { + throw new error_1.LengthOrHashMismatchError(`Expected length ${this.length} but got ${observedLength}`); + } + // Verify each digest matches expected value + Object.entries(digests).forEach(([key, value]) => { + const expected = this.hashes[key]; + const actual = value.digest('hex'); + if (actual !== expected) { + throw new error_1.LengthOrHashMismatchError(`Expected hash ${expected} but got ${actual}`); + } + }); + } + toJSON() { + return { + length: this.length, + hashes: this.hashes, + ...this.unrecognizedFields, + }; + } + static fromJSON(path, data) { + const { length, hashes, ...rest } = data; + if (typeof length !== 'number') { + throw new TypeError('length must be a number'); + } + if (!utils_1.guard.isStringRecord(hashes)) { + throw new TypeError('hashes must have string keys 
and values'); + } + return new TargetFile({ + length, + path, + hashes, + unrecognizedFields: rest, + }); + } +} +exports.TargetFile = TargetFile; +// Check that supplied length if valid +function validateLength(length) { + if (length < 0) { + throw new error_1.ValueError('Length must be at least 0'); + } +} diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/index.js b/node_modules/pacote/node_modules/@tufjs/models/dist/index.js new file mode 100644 index 0000000000000..a4dc783659f04 --- /dev/null +++ b/node_modules/pacote/node_modules/@tufjs/models/dist/index.js @@ -0,0 +1,24 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Timestamp = exports.Targets = exports.Snapshot = exports.Signature = exports.Root = exports.Metadata = exports.Key = exports.TargetFile = exports.MetaFile = exports.ValueError = exports.MetadataKind = void 0; +var base_1 = require("./base"); +Object.defineProperty(exports, "MetadataKind", { enumerable: true, get: function () { return base_1.MetadataKind; } }); +var error_1 = require("./error"); +Object.defineProperty(exports, "ValueError", { enumerable: true, get: function () { return error_1.ValueError; } }); +var file_1 = require("./file"); +Object.defineProperty(exports, "MetaFile", { enumerable: true, get: function () { return file_1.MetaFile; } }); +Object.defineProperty(exports, "TargetFile", { enumerable: true, get: function () { return file_1.TargetFile; } }); +var key_1 = require("./key"); +Object.defineProperty(exports, "Key", { enumerable: true, get: function () { return key_1.Key; } }); +var metadata_1 = require("./metadata"); +Object.defineProperty(exports, "Metadata", { enumerable: true, get: function () { return metadata_1.Metadata; } }); +var root_1 = require("./root"); +Object.defineProperty(exports, "Root", { enumerable: true, get: function () { return root_1.Root; } }); +var signature_1 = require("./signature"); +Object.defineProperty(exports, "Signature", { enumerable: true, get: function () { return signature_1.Signature; } }); +var snapshot_1 = require("./snapshot"); +Object.defineProperty(exports, "Snapshot", { enumerable: true, get: function () { return snapshot_1.Snapshot; } }); +var targets_1 = require("./targets"); +Object.defineProperty(exports, "Targets", { enumerable: true, get: function () { return targets_1.Targets; } }); +var timestamp_1 = require("./timestamp"); +Object.defineProperty(exports, "Timestamp", { enumerable: true, get: function () { return timestamp_1.Timestamp; } }); diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/key.js b/node_modules/pacote/node_modules/@tufjs/models/dist/key.js new file mode 100644 index 0000000000000..5e55b09d7c6dd --- /dev/null +++ b/node_modules/pacote/node_modules/@tufjs/models/dist/key.js @@ -0,0 +1,85 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Key = void 0; +const util_1 = __importDefault(require("util")); +const error_1 = require("./error"); +const utils_1 = require("./utils"); +const key_1 = require("./utils/key"); +// A container class representing the public portion of a Key. 
+class Key { + constructor(options) { + const { keyID, keyType, scheme, keyVal, unrecognizedFields } = options; + this.keyID = keyID; + this.keyType = keyType; + this.scheme = scheme; + this.keyVal = keyVal; + this.unrecognizedFields = unrecognizedFields || {}; + } + // Verifies the that the metadata.signatures contains a signature made with + // this key and is correctly signed. + verifySignature(metadata) { + const signature = metadata.signatures[this.keyID]; + if (!signature) + throw new error_1.UnsignedMetadataError('no signature for key found in metadata'); + if (!this.keyVal.public) + throw new error_1.UnsignedMetadataError('no public key found'); + const publicKey = (0, key_1.getPublicKey)({ + keyType: this.keyType, + scheme: this.scheme, + keyVal: this.keyVal.public, + }); + const signedData = metadata.signed.toJSON(); + try { + if (!utils_1.crypto.verifySignature(signedData, publicKey, signature.sig)) { + throw new error_1.UnsignedMetadataError(`failed to verify ${this.keyID} signature`); + } + } + catch (error) { + if (error instanceof error_1.UnsignedMetadataError) { + throw error; + } + throw new error_1.UnsignedMetadataError(`failed to verify ${this.keyID} signature`); + } + } + equals(other) { + if (!(other instanceof Key)) { + return false; + } + return (this.keyID === other.keyID && + this.keyType === other.keyType && + this.scheme === other.scheme && + util_1.default.isDeepStrictEqual(this.keyVal, other.keyVal) && + util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); + } + toJSON() { + return { + keytype: this.keyType, + scheme: this.scheme, + keyval: this.keyVal, + ...this.unrecognizedFields, + }; + } + static fromJSON(keyID, data) { + const { keytype, scheme, keyval, ...rest } = data; + if (typeof keytype !== 'string') { + throw new TypeError('keytype must be a string'); + } + if (typeof scheme !== 'string') { + throw new TypeError('scheme must be a string'); + } + if (!utils_1.guard.isStringRecord(keyval)) { + throw new TypeError('keyval must be a string record'); + } + return new Key({ + keyID, + keyType: keytype, + scheme, + keyVal: keyval, + unrecognizedFields: rest, + }); + } +} +exports.Key = Key; diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/metadata.js b/node_modules/pacote/node_modules/@tufjs/models/dist/metadata.js new file mode 100644 index 0000000000000..389d2504e0b53 --- /dev/null +++ b/node_modules/pacote/node_modules/@tufjs/models/dist/metadata.js @@ -0,0 +1,160 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Metadata = void 0; +const canonical_json_1 = require("@tufjs/canonical-json"); +const util_1 = __importDefault(require("util")); +const base_1 = require("./base"); +const error_1 = require("./error"); +const root_1 = require("./root"); +const signature_1 = require("./signature"); +const snapshot_1 = require("./snapshot"); +const targets_1 = require("./targets"); +const timestamp_1 = require("./timestamp"); +const utils_1 = require("./utils"); +/*** + * A container for signed TUF metadata. + * + * Provides methods to convert to and from json, read and write to and + * from JSON and to create and verify metadata signatures. + * + * ``Metadata[T]`` is a generic container type where T can be any one type of + * [``Root``, ``Timestamp``, ``Snapshot``, ``Targets``]. 
The purpose of this + * is to allow static type checking of the signed attribute in code using + * Metadata:: + * + * root_md = Metadata[Root].fromJSON("root.json") + * # root_md type is now Metadata[Root]. This means signed and its + * # attributes like consistent_snapshot are now statically typed and the + * # types can be verified by static type checkers and shown by IDEs + * + * Using a type constraint is not required but not doing so means T is not a + * specific type so static typing cannot happen. Note that the type constraint + * ``[Root]`` is not validated at runtime (as pure annotations are not available + * then). + * + * Apart from ``expires`` all of the arguments to the inner constructors have + * reasonable default values for new metadata. + */ +class Metadata { + constructor(signed, signatures, unrecognizedFields) { + this.signed = signed; + this.signatures = signatures || {}; + this.unrecognizedFields = unrecognizedFields || {}; + } + sign(signer, append = true) { + const bytes = Buffer.from((0, canonical_json_1.canonicalize)(this.signed.toJSON())); + const signature = signer(bytes); + if (!append) { + this.signatures = {}; + } + this.signatures[signature.keyID] = signature; + } + verifyDelegate(delegatedRole, delegatedMetadata) { + let role; + let keys = {}; + switch (this.signed.type) { + case base_1.MetadataKind.Root: + keys = this.signed.keys; + role = this.signed.roles[delegatedRole]; + break; + case base_1.MetadataKind.Targets: + if (!this.signed.delegations) { + throw new error_1.ValueError(`No delegations found for ${delegatedRole}`); + } + keys = this.signed.delegations.keys; + if (this.signed.delegations.roles) { + role = this.signed.delegations.roles[delegatedRole]; + } + else if (this.signed.delegations.succinctRoles) { + if (this.signed.delegations.succinctRoles.isDelegatedRole(delegatedRole)) { + role = this.signed.delegations.succinctRoles; + } + } + break; + default: + throw new TypeError('invalid metadata type'); + } + if (!role) { + throw new error_1.ValueError(`no delegation found for ${delegatedRole}`); + } + const signingKeys = new Set(); + role.keyIDs.forEach((keyID) => { + const key = keys[keyID]; + // If we dont' have the key, continue checking other keys + if (!key) { + return; + } + try { + key.verifySignature(delegatedMetadata); + signingKeys.add(key.keyID); + } + catch (error) { + // continue + } + }); + if (signingKeys.size < role.threshold) { + throw new error_1.UnsignedMetadataError(`${delegatedRole} was signed by ${signingKeys.size}/${role.threshold} keys`); + } + } + equals(other) { + if (!(other instanceof Metadata)) { + return false; + } + return (this.signed.equals(other.signed) && + util_1.default.isDeepStrictEqual(this.signatures, other.signatures) && + util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); + } + toJSON() { + const signatures = Object.values(this.signatures).map((signature) => { + return signature.toJSON(); + }); + return { + signatures, + signed: this.signed.toJSON(), + ...this.unrecognizedFields, + }; + } + static fromJSON(type, data) { + const { signed, signatures, ...rest } = data; + if (!utils_1.guard.isDefined(signed) || !utils_1.guard.isObject(signed)) { + throw new TypeError('signed is not defined'); + } + if (type !== signed._type) { + throw new error_1.ValueError(`expected '${type}', got ${signed['_type']}`); + } + if (!utils_1.guard.isObjectArray(signatures)) { + throw new TypeError('signatures is not an array'); + } + let signedObj; + switch (type) { + case 
base_1.MetadataKind.Root: + signedObj = root_1.Root.fromJSON(signed); + break; + case base_1.MetadataKind.Timestamp: + signedObj = timestamp_1.Timestamp.fromJSON(signed); + break; + case base_1.MetadataKind.Snapshot: + signedObj = snapshot_1.Snapshot.fromJSON(signed); + break; + case base_1.MetadataKind.Targets: + signedObj = targets_1.Targets.fromJSON(signed); + break; + default: + throw new TypeError('invalid metadata type'); + } + const sigMap = {}; + // Ensure that each signature is unique + signatures.forEach((sigData) => { + const sig = signature_1.Signature.fromJSON(sigData); + if (sigMap[sig.keyID]) { + throw new error_1.ValueError(`multiple signatures found for keyid: ${sig.keyID}`); + } + sigMap[sig.keyID] = sig; + }); + return new Metadata(signedObj, sigMap, rest); + } +} +exports.Metadata = Metadata; diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/role.js b/node_modules/pacote/node_modules/@tufjs/models/dist/role.js new file mode 100644 index 0000000000000..f7ddbc6fe3f38 --- /dev/null +++ b/node_modules/pacote/node_modules/@tufjs/models/dist/role.js @@ -0,0 +1,299 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SuccinctRoles = exports.DelegatedRole = exports.Role = exports.TOP_LEVEL_ROLE_NAMES = void 0; +const crypto_1 = __importDefault(require("crypto")); +const minimatch_1 = require("minimatch"); +const util_1 = __importDefault(require("util")); +const error_1 = require("./error"); +const utils_1 = require("./utils"); +exports.TOP_LEVEL_ROLE_NAMES = [ + 'root', + 'targets', + 'snapshot', + 'timestamp', +]; +/** + * Container that defines which keys are required to sign roles metadata. + * + * Role defines how many keys are required to successfully sign the roles + * metadata, and which keys are accepted. + */ +class Role { + constructor(options) { + const { keyIDs, threshold, unrecognizedFields } = options; + if (hasDuplicates(keyIDs)) { + throw new error_1.ValueError('duplicate key IDs found'); + } + if (threshold < 1) { + throw new error_1.ValueError('threshold must be at least 1'); + } + this.keyIDs = keyIDs; + this.threshold = threshold; + this.unrecognizedFields = unrecognizedFields || {}; + } + equals(other) { + if (!(other instanceof Role)) { + return false; + } + return (this.threshold === other.threshold && + util_1.default.isDeepStrictEqual(this.keyIDs, other.keyIDs) && + util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); + } + toJSON() { + return { + keyids: this.keyIDs, + threshold: this.threshold, + ...this.unrecognizedFields, + }; + } + static fromJSON(data) { + const { keyids, threshold, ...rest } = data; + if (!utils_1.guard.isStringArray(keyids)) { + throw new TypeError('keyids must be an array'); + } + if (typeof threshold !== 'number') { + throw new TypeError('threshold must be a number'); + } + return new Role({ + keyIDs: keyids, + threshold, + unrecognizedFields: rest, + }); + } +} +exports.Role = Role; +function hasDuplicates(array) { + return new Set(array).size !== array.length; +} +/** + * A container with information about a delegated role. 
+ * + * A delegation can happen in two ways: + * - ``paths`` is set: delegates targets matching any path pattern in ``paths`` + * - ``pathHashPrefixes`` is set: delegates targets whose target path hash + * starts with any of the prefixes in ``pathHashPrefixes`` + * + * ``paths`` and ``pathHashPrefixes`` are mutually exclusive: both cannot be + * set, at least one of them must be set. + */ +class DelegatedRole extends Role { + constructor(opts) { + super(opts); + const { name, terminating, paths, pathHashPrefixes } = opts; + this.name = name; + this.terminating = terminating; + if (opts.paths && opts.pathHashPrefixes) { + throw new error_1.ValueError('paths and pathHashPrefixes are mutually exclusive'); + } + this.paths = paths; + this.pathHashPrefixes = pathHashPrefixes; + } + equals(other) { + if (!(other instanceof DelegatedRole)) { + return false; + } + return (super.equals(other) && + this.name === other.name && + this.terminating === other.terminating && + util_1.default.isDeepStrictEqual(this.paths, other.paths) && + util_1.default.isDeepStrictEqual(this.pathHashPrefixes, other.pathHashPrefixes)); + } + isDelegatedPath(targetFilepath) { + if (this.paths) { + return this.paths.some((pathPattern) => isTargetInPathPattern(targetFilepath, pathPattern)); + } + if (this.pathHashPrefixes) { + const hasher = crypto_1.default.createHash('sha256'); + const pathHash = hasher.update(targetFilepath).digest('hex'); + return this.pathHashPrefixes.some((pathHashPrefix) => pathHash.startsWith(pathHashPrefix)); + } + return false; + } + toJSON() { + const json = { + ...super.toJSON(), + name: this.name, + terminating: this.terminating, + }; + if (this.paths) { + json.paths = this.paths; + } + if (this.pathHashPrefixes) { + json.path_hash_prefixes = this.pathHashPrefixes; + } + return json; + } + static fromJSON(data) { + const { keyids, threshold, name, terminating, paths, path_hash_prefixes, ...rest } = data; + if (!utils_1.guard.isStringArray(keyids)) { + throw new TypeError('keyids must be an array of strings'); + } + if (typeof threshold !== 'number') { + throw new TypeError('threshold must be a number'); + } + if (typeof name !== 'string') { + throw new TypeError('name must be a string'); + } + if (typeof terminating !== 'boolean') { + throw new TypeError('terminating must be a boolean'); + } + if (utils_1.guard.isDefined(paths) && !utils_1.guard.isStringArray(paths)) { + throw new TypeError('paths must be an array of strings'); + } + if (utils_1.guard.isDefined(path_hash_prefixes) && + !utils_1.guard.isStringArray(path_hash_prefixes)) { + throw new TypeError('path_hash_prefixes must be an array of strings'); + } + return new DelegatedRole({ + keyIDs: keyids, + threshold, + name, + terminating, + paths, + pathHashPrefixes: path_hash_prefixes, + unrecognizedFields: rest, + }); + } +} +exports.DelegatedRole = DelegatedRole; +// JS version of Ruby's Array#zip +const zip = (a, b) => a.map((k, i) => [k, b[i]]); +function isTargetInPathPattern(target, pattern) { + const targetParts = target.split('/'); + const patternParts = pattern.split('/'); + if (patternParts.length != targetParts.length) { + return false; + } + return zip(targetParts, patternParts).every(([targetPart, patternPart]) => (0, minimatch_1.minimatch)(targetPart, patternPart)); +} +/** + * Succinctly defines a hash bin delegation graph. + * + * A ``SuccinctRoles`` object describes a delegation graph that covers all + * targets, distributing them uniformly over the delegated roles (i.e. bins) + * in the graph. 
+ * + * The total number of bins is 2 to the power of the passed ``bit_length``. + * + * Bin names are the concatenation of the passed ``name_prefix`` and a + * zero-padded hex representation of the bin index separated by a hyphen. + * + * The passed ``keyids`` and ``threshold`` is used for each bin, and each bin + * is 'terminating'. + * + * For details: https://github.com/theupdateframework/taps/blob/master/tap15.md + */ +class SuccinctRoles extends Role { + constructor(opts) { + super(opts); + const { bitLength, namePrefix } = opts; + if (bitLength <= 0 || bitLength > 32) { + throw new error_1.ValueError('bitLength must be between 1 and 32'); + } + this.bitLength = bitLength; + this.namePrefix = namePrefix; + // Calculate the suffix_len value based on the total number of bins in + // hex. If bit_length = 10 then number_of_bins = 1024 or bin names will + // have a suffix between "000" and "3ff" in hex and suffix_len will be 3 + // meaning the third bin will have a suffix of "003". + this.numberOfBins = Math.pow(2, bitLength); + // suffix_len is calculated based on "number_of_bins - 1" as the name + // of the last bin contains the number "number_of_bins -1" as a suffix. + this.suffixLen = (this.numberOfBins - 1).toString(16).length; + } + equals(other) { + if (!(other instanceof SuccinctRoles)) { + return false; + } + return (super.equals(other) && + this.bitLength === other.bitLength && + this.namePrefix === other.namePrefix); + } + /*** + * Calculates the name of the delegated role responsible for 'target_filepath'. + * + * The target at path ''target_filepath' is assigned to a bin by casting + * the left-most 'bit_length' of bits of the file path hash digest to + * int, using it as bin index between 0 and '2**bit_length - 1'. + * + * Args: + * target_filepath: URL path to a target file, relative to a base + * targets URL. + */ + getRoleForTarget(targetFilepath) { + const hasher = crypto_1.default.createHash('sha256'); + const hasherBuffer = hasher.update(targetFilepath).digest(); + // can't ever need more than 4 bytes (32 bits). + const hashBytes = hasherBuffer.subarray(0, 4); + // Right shift hash bytes, so that we only have the leftmost + // bit_length bits that we care about. + const shiftValue = 32 - this.bitLength; + const binNumber = hashBytes.readUInt32BE() >>> shiftValue; + // Add zero padding if necessary and cast to hex the suffix. + const suffix = binNumber.toString(16).padStart(this.suffixLen, '0'); + return `${this.namePrefix}-${suffix}`; + } + *getRoles() { + for (let i = 0; i < this.numberOfBins; i++) { + const suffix = i.toString(16).padStart(this.suffixLen, '0'); + yield `${this.namePrefix}-${suffix}`; + } + } + /*** + * Determines whether the given ``role_name`` is in one of + * the delegated roles that ``SuccinctRoles`` represents. + * + * Args: + * role_name: The name of the role to check against. 
+ */ + isDelegatedRole(roleName) { + const desiredPrefix = this.namePrefix + '-'; + if (!roleName.startsWith(desiredPrefix)) { + return false; + } + const suffix = roleName.slice(desiredPrefix.length, roleName.length); + if (suffix.length != this.suffixLen) { + return false; + } + // make sure the suffix is a hex string + if (!suffix.match(/^[0-9a-fA-F]+$/)) { + return false; + } + const num = parseInt(suffix, 16); + return 0 <= num && num < this.numberOfBins; + } + toJSON() { + const json = { + ...super.toJSON(), + bit_length: this.bitLength, + name_prefix: this.namePrefix, + }; + return json; + } + static fromJSON(data) { + const { keyids, threshold, bit_length, name_prefix, ...rest } = data; + if (!utils_1.guard.isStringArray(keyids)) { + throw new TypeError('keyids must be an array of strings'); + } + if (typeof threshold !== 'number') { + throw new TypeError('threshold must be a number'); + } + if (typeof bit_length !== 'number') { + throw new TypeError('bit_length must be a number'); + } + if (typeof name_prefix !== 'string') { + throw new TypeError('name_prefix must be a string'); + } + return new SuccinctRoles({ + keyIDs: keyids, + threshold, + bitLength: bit_length, + namePrefix: name_prefix, + unrecognizedFields: rest, + }); + } +} +exports.SuccinctRoles = SuccinctRoles; diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/root.js b/node_modules/pacote/node_modules/@tufjs/models/dist/root.js new file mode 100644 index 0000000000000..36d0ef0f186d1 --- /dev/null +++ b/node_modules/pacote/node_modules/@tufjs/models/dist/root.js @@ -0,0 +1,116 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Root = void 0; +const util_1 = __importDefault(require("util")); +const base_1 = require("./base"); +const error_1 = require("./error"); +const key_1 = require("./key"); +const role_1 = require("./role"); +const utils_1 = require("./utils"); +/** + * A container for the signed part of root metadata. + * + * The top-level role and metadata file signed by the root keys. + * This role specifies trusted keys for all other top-level roles, which may further delegate trust. + */ +class Root extends base_1.Signed { + constructor(options) { + super(options); + this.type = base_1.MetadataKind.Root; + this.keys = options.keys || {}; + this.consistentSnapshot = options.consistentSnapshot ?? 
true; + if (!options.roles) { + this.roles = role_1.TOP_LEVEL_ROLE_NAMES.reduce((acc, role) => ({ + ...acc, + [role]: new role_1.Role({ keyIDs: [], threshold: 1 }), + }), {}); + } + else { + const roleNames = new Set(Object.keys(options.roles)); + if (!role_1.TOP_LEVEL_ROLE_NAMES.every((role) => roleNames.has(role))) { + throw new error_1.ValueError('missing top-level role'); + } + this.roles = options.roles; + } + } + addKey(key, role) { + if (!this.roles[role]) { + throw new error_1.ValueError(`role ${role} does not exist`); + } + if (!this.roles[role].keyIDs.includes(key.keyID)) { + this.roles[role].keyIDs.push(key.keyID); + } + this.keys[key.keyID] = key; + } + equals(other) { + if (!(other instanceof Root)) { + return false; + } + return (super.equals(other) && + this.consistentSnapshot === other.consistentSnapshot && + util_1.default.isDeepStrictEqual(this.keys, other.keys) && + util_1.default.isDeepStrictEqual(this.roles, other.roles)); + } + toJSON() { + return { + _type: this.type, + spec_version: this.specVersion, + version: this.version, + expires: this.expires, + keys: keysToJSON(this.keys), + roles: rolesToJSON(this.roles), + consistent_snapshot: this.consistentSnapshot, + ...this.unrecognizedFields, + }; + } + static fromJSON(data) { + const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data); + const { keys, roles, consistent_snapshot, ...rest } = unrecognizedFields; + if (typeof consistent_snapshot !== 'boolean') { + throw new TypeError('consistent_snapshot must be a boolean'); + } + return new Root({ + ...commonFields, + keys: keysFromJSON(keys), + roles: rolesFromJSON(roles), + consistentSnapshot: consistent_snapshot, + unrecognizedFields: rest, + }); + } +} +exports.Root = Root; +function keysToJSON(keys) { + return Object.entries(keys).reduce((acc, [keyID, key]) => ({ ...acc, [keyID]: key.toJSON() }), {}); +} +function rolesToJSON(roles) { + return Object.entries(roles).reduce((acc, [roleName, role]) => ({ ...acc, [roleName]: role.toJSON() }), {}); +} +function keysFromJSON(data) { + let keys; + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObjectRecord(data)) { + throw new TypeError('keys must be an object'); + } + keys = Object.entries(data).reduce((acc, [keyID, keyData]) => ({ + ...acc, + [keyID]: key_1.Key.fromJSON(keyID, keyData), + }), {}); + } + return keys; +} +function rolesFromJSON(data) { + let roles; + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObjectRecord(data)) { + throw new TypeError('roles must be an object'); + } + roles = Object.entries(data).reduce((acc, [roleName, roleData]) => ({ + ...acc, + [roleName]: role_1.Role.fromJSON(roleData), + }), {}); + } + return roles; +} diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/signature.js b/node_modules/pacote/node_modules/@tufjs/models/dist/signature.js new file mode 100644 index 0000000000000..33eb204eb0835 --- /dev/null +++ b/node_modules/pacote/node_modules/@tufjs/models/dist/signature.js @@ -0,0 +1,38 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Signature = void 0; +/** + * A container class containing information about a signature. + * + * Contains a signature and the keyid uniquely identifying the key used + * to generate the signature. + * + * Provide a `fromJSON` method to create a Signature from a JSON object. 
+ */ +class Signature { + constructor(options) { + const { keyID, sig } = options; + this.keyID = keyID; + this.sig = sig; + } + toJSON() { + return { + keyid: this.keyID, + sig: this.sig, + }; + } + static fromJSON(data) { + const { keyid, sig } = data; + if (typeof keyid !== 'string') { + throw new TypeError('keyid must be a string'); + } + if (typeof sig !== 'string') { + throw new TypeError('sig must be a string'); + } + return new Signature({ + keyID: keyid, + sig: sig, + }); + } +} +exports.Signature = Signature; diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/snapshot.js b/node_modules/pacote/node_modules/@tufjs/models/dist/snapshot.js new file mode 100644 index 0000000000000..e90ea8e729e4e --- /dev/null +++ b/node_modules/pacote/node_modules/@tufjs/models/dist/snapshot.js @@ -0,0 +1,71 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Snapshot = void 0; +const util_1 = __importDefault(require("util")); +const base_1 = require("./base"); +const file_1 = require("./file"); +const utils_1 = require("./utils"); +/** + * A container for the signed part of snapshot metadata. + * + * Snapshot contains information about all target Metadata files. + * A top-level role that specifies the latest versions of all targets metadata files, + * and hence the latest versions of all targets (including any dependencies between them) on the repository. + */ +class Snapshot extends base_1.Signed { + constructor(opts) { + super(opts); + this.type = base_1.MetadataKind.Snapshot; + this.meta = opts.meta || { 'targets.json': new file_1.MetaFile({ version: 1 }) }; + } + equals(other) { + if (!(other instanceof Snapshot)) { + return false; + } + return super.equals(other) && util_1.default.isDeepStrictEqual(this.meta, other.meta); + } + toJSON() { + return { + _type: this.type, + meta: metaToJSON(this.meta), + spec_version: this.specVersion, + version: this.version, + expires: this.expires, + ...this.unrecognizedFields, + }; + } + static fromJSON(data) { + const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data); + const { meta, ...rest } = unrecognizedFields; + return new Snapshot({ + ...commonFields, + meta: metaFromJSON(meta), + unrecognizedFields: rest, + }); + } +} +exports.Snapshot = Snapshot; +function metaToJSON(meta) { + return Object.entries(meta).reduce((acc, [path, metadata]) => ({ + ...acc, + [path]: metadata.toJSON(), + }), {}); +} +function metaFromJSON(data) { + let meta; + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObjectRecord(data)) { + throw new TypeError('meta field is malformed'); + } + else { + meta = Object.entries(data).reduce((acc, [path, metadata]) => ({ + ...acc, + [path]: file_1.MetaFile.fromJSON(metadata), + }), {}); + } + } + return meta; +} diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/targets.js b/node_modules/pacote/node_modules/@tufjs/models/dist/targets.js new file mode 100644 index 0000000000000..54bd8f8c554af --- /dev/null +++ b/node_modules/pacote/node_modules/@tufjs/models/dist/targets.js @@ -0,0 +1,92 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Targets = void 0; +const util_1 = __importDefault(require("util")); +const base_1 = require("./base"); +const delegations_1 = require("./delegations"); +const file_1 = require("./file"); +const utils_1 = require("./utils"); +// Container for the signed part of targets metadata. +// +// Targets contains verifying information about target files and also delegates +// responsible to other Targets roles. +class Targets extends base_1.Signed { + constructor(options) { + super(options); + this.type = base_1.MetadataKind.Targets; + this.targets = options.targets || {}; + this.delegations = options.delegations; + } + addTarget(target) { + this.targets[target.path] = target; + } + equals(other) { + if (!(other instanceof Targets)) { + return false; + } + return (super.equals(other) && + util_1.default.isDeepStrictEqual(this.targets, other.targets) && + util_1.default.isDeepStrictEqual(this.delegations, other.delegations)); + } + toJSON() { + const json = { + _type: this.type, + spec_version: this.specVersion, + version: this.version, + expires: this.expires, + targets: targetsToJSON(this.targets), + ...this.unrecognizedFields, + }; + if (this.delegations) { + json.delegations = this.delegations.toJSON(); + } + return json; + } + static fromJSON(data) { + const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data); + const { targets, delegations, ...rest } = unrecognizedFields; + return new Targets({ + ...commonFields, + targets: targetsFromJSON(targets), + delegations: delegationsFromJSON(delegations), + unrecognizedFields: rest, + }); + } +} +exports.Targets = Targets; +function targetsToJSON(targets) { + return Object.entries(targets).reduce((acc, [path, target]) => ({ + ...acc, + [path]: target.toJSON(), + }), {}); +} +function targetsFromJSON(data) { + let targets; + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObjectRecord(data)) { + throw new TypeError('targets must be an object'); + } + else { + targets = Object.entries(data).reduce((acc, [path, target]) => ({ + ...acc, + [path]: file_1.TargetFile.fromJSON(path, target), + }), {}); + } + } + return targets; +} +function delegationsFromJSON(data) { + let delegations; + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObject(data)) { + throw new TypeError('delegations must be an object'); + } + else { + delegations = delegations_1.Delegations.fromJSON(data); + } + } + return delegations; +} diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/timestamp.js b/node_modules/pacote/node_modules/@tufjs/models/dist/timestamp.js new file mode 100644 index 0000000000000..9880c4c9fc254 --- /dev/null +++ b/node_modules/pacote/node_modules/@tufjs/models/dist/timestamp.js @@ -0,0 +1,58 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Timestamp = void 0; +const base_1 = require("./base"); +const file_1 = require("./file"); +const utils_1 = require("./utils"); +/** + * A container for the signed part of timestamp metadata. + * + * A top-level that specifies the latest version of the snapshot role metadata file, + * and hence the latest versions of all metadata and targets on the repository. 
+ */ +class Timestamp extends base_1.Signed { + constructor(options) { + super(options); + this.type = base_1.MetadataKind.Timestamp; + this.snapshotMeta = options.snapshotMeta || new file_1.MetaFile({ version: 1 }); + } + equals(other) { + if (!(other instanceof Timestamp)) { + return false; + } + return super.equals(other) && this.snapshotMeta.equals(other.snapshotMeta); + } + toJSON() { + return { + _type: this.type, + spec_version: this.specVersion, + version: this.version, + expires: this.expires, + meta: { 'snapshot.json': this.snapshotMeta.toJSON() }, + ...this.unrecognizedFields, + }; + } + static fromJSON(data) { + const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data); + const { meta, ...rest } = unrecognizedFields; + return new Timestamp({ + ...commonFields, + snapshotMeta: snapshotMetaFromJSON(meta), + unrecognizedFields: rest, + }); + } +} +exports.Timestamp = Timestamp; +function snapshotMetaFromJSON(data) { + let snapshotMeta; + if (utils_1.guard.isDefined(data)) { + const snapshotData = data['snapshot.json']; + if (!utils_1.guard.isDefined(snapshotData) || !utils_1.guard.isObject(snapshotData)) { + throw new TypeError('missing snapshot.json in meta'); + } + else { + snapshotMeta = file_1.MetaFile.fromJSON(snapshotData); + } + } + return snapshotMeta; +} diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/utils/guard.js b/node_modules/pacote/node_modules/@tufjs/models/dist/utils/guard.js new file mode 100644 index 0000000000000..911e8475986bb --- /dev/null +++ b/node_modules/pacote/node_modules/@tufjs/models/dist/utils/guard.js @@ -0,0 +1,32 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isDefined = isDefined; +exports.isObject = isObject; +exports.isStringArray = isStringArray; +exports.isObjectArray = isObjectArray; +exports.isStringRecord = isStringRecord; +exports.isObjectRecord = isObjectRecord; +function isDefined(val) { + return val !== undefined; +} +function isObject(value) { + return typeof value === 'object' && value !== null; +} +function isStringArray(value) { + return Array.isArray(value) && value.every((v) => typeof v === 'string'); +} +function isObjectArray(value) { + return Array.isArray(value) && value.every(isObject); +} +function isStringRecord(value) { + return (typeof value === 'object' && + value !== null && + Object.keys(value).every((k) => typeof k === 'string') && + Object.values(value).every((v) => typeof v === 'string')); +} +function isObjectRecord(value) { + return (typeof value === 'object' && + value !== null && + Object.keys(value).every((k) => typeof k === 'string') && + Object.values(value).every((v) => typeof v === 'object' && v !== null)); +} diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/utils/index.js b/node_modules/pacote/node_modules/@tufjs/models/dist/utils/index.js new file mode 100644 index 0000000000000..872aae28049c9 --- /dev/null +++ b/node_modules/pacote/node_modules/@tufjs/models/dist/utils/index.js @@ -0,0 +1,28 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.crypto = exports.guard = void 0; +exports.guard = __importStar(require("./guard")); +exports.crypto = __importStar(require("./verify")); diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/utils/key.js b/node_modules/pacote/node_modules/@tufjs/models/dist/utils/key.js new file mode 100644 index 0000000000000..3c3ec07f1425a --- /dev/null +++ b/node_modules/pacote/node_modules/@tufjs/models/dist/utils/key.js @@ -0,0 +1,142 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getPublicKey = getPublicKey; +const crypto_1 = __importDefault(require("crypto")); +const error_1 = require("../error"); +const oid_1 = require("./oid"); +const ASN1_TAG_SEQUENCE = 0x30; +const ANS1_TAG_BIT_STRING = 0x03; +const NULL_BYTE = 0x00; +const OID_EDDSA = '1.3.101.112'; +const OID_EC_PUBLIC_KEY = '1.2.840.10045.2.1'; +const OID_EC_CURVE_P256V1 = '1.2.840.10045.3.1.7'; +const PEM_HEADER = '-----BEGIN PUBLIC KEY-----'; +function getPublicKey(keyInfo) { + switch (keyInfo.keyType) { + case 'rsa': + return getRSAPublicKey(keyInfo); + case 'ed25519': + return getED25519PublicKey(keyInfo); + case 'ecdsa': + case 'ecdsa-sha2-nistp256': + case 'ecdsa-sha2-nistp384': + return getECDCSAPublicKey(keyInfo); + default: + throw new error_1.UnsupportedAlgorithmError(`Unsupported key type: ${keyInfo.keyType}`); + } +} +function getRSAPublicKey(keyInfo) { + // Only support PEM-encoded RSA keys + if (!keyInfo.keyVal.startsWith(PEM_HEADER)) { + throw new error_1.CryptoError('Invalid key format'); + } + const key = crypto_1.default.createPublicKey(keyInfo.keyVal); + switch (keyInfo.scheme) { + case 'rsassa-pss-sha256': + return { + key: key, + padding: crypto_1.default.constants.RSA_PKCS1_PSS_PADDING, + }; + default: + throw new error_1.UnsupportedAlgorithmError(`Unsupported RSA scheme: ${keyInfo.scheme}`); + } +} +function getED25519PublicKey(keyInfo) { + let key; + // If key is already PEM-encoded we can just parse it + if (keyInfo.keyVal.startsWith(PEM_HEADER)) { + key = crypto_1.default.createPublicKey(keyInfo.keyVal); + } + else { + // If key is not PEM-encoded it had better be hex + if (!isHex(keyInfo.keyVal)) { + throw new error_1.CryptoError('Invalid key format'); + } + key = crypto_1.default.createPublicKey({ + key: ed25519.hexToDER(keyInfo.keyVal), + format: 'der', + type: 'spki', + }); + } + return { key }; +} +function getECDCSAPublicKey(keyInfo) { + let key; + // If key is already PEM-encoded we can just parse it + if (keyInfo.keyVal.startsWith(PEM_HEADER)) { + key = 
crypto_1.default.createPublicKey(keyInfo.keyVal); + } + else { + // If key is not PEM-encoded it had better be hex + if (!isHex(keyInfo.keyVal)) { + throw new error_1.CryptoError('Invalid key format'); + } + key = crypto_1.default.createPublicKey({ + key: ecdsa.hexToDER(keyInfo.keyVal), + format: 'der', + type: 'spki', + }); + } + return { key }; +} +const ed25519 = { + // Translates a hex key into a crypto KeyObject + // https://keygen.sh/blog/how-to-use-hexadecimal-ed25519-keys-in-node/ + hexToDER: (hex) => { + const key = Buffer.from(hex, 'hex'); + const oid = (0, oid_1.encodeOIDString)(OID_EDDSA); + // Create a byte sequence containing the OID and key + const elements = Buffer.concat([ + Buffer.concat([ + Buffer.from([ASN1_TAG_SEQUENCE]), + Buffer.from([oid.length]), + oid, + ]), + Buffer.concat([ + Buffer.from([ANS1_TAG_BIT_STRING]), + Buffer.from([key.length + 1]), + Buffer.from([NULL_BYTE]), + key, + ]), + ]); + // Wrap up by creating a sequence of elements + const der = Buffer.concat([ + Buffer.from([ASN1_TAG_SEQUENCE]), + Buffer.from([elements.length]), + elements, + ]); + return der; + }, +}; +const ecdsa = { + hexToDER: (hex) => { + const key = Buffer.from(hex, 'hex'); + const bitString = Buffer.concat([ + Buffer.from([ANS1_TAG_BIT_STRING]), + Buffer.from([key.length + 1]), + Buffer.from([NULL_BYTE]), + key, + ]); + const oids = Buffer.concat([ + (0, oid_1.encodeOIDString)(OID_EC_PUBLIC_KEY), + (0, oid_1.encodeOIDString)(OID_EC_CURVE_P256V1), + ]); + const oidSequence = Buffer.concat([ + Buffer.from([ASN1_TAG_SEQUENCE]), + Buffer.from([oids.length]), + oids, + ]); + // Wrap up by creating a sequence of elements + const der = Buffer.concat([ + Buffer.from([ASN1_TAG_SEQUENCE]), + Buffer.from([oidSequence.length + bitString.length]), + oidSequence, + bitString, + ]); + return der; + }, +}; +const isHex = (key) => /^[0-9a-fA-F]+$/.test(key); diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/utils/oid.js b/node_modules/pacote/node_modules/@tufjs/models/dist/utils/oid.js new file mode 100644 index 0000000000000..00b29c3030d1e --- /dev/null +++ b/node_modules/pacote/node_modules/@tufjs/models/dist/utils/oid.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.encodeOIDString = encodeOIDString; +const ANS1_TAG_OID = 0x06; +function encodeOIDString(oid) { + const parts = oid.split('.'); + // The first two subidentifiers are encoded into the first byte + const first = parseInt(parts[0], 10) * 40 + parseInt(parts[1], 10); + const rest = []; + parts.slice(2).forEach((part) => { + const bytes = encodeVariableLengthInteger(parseInt(part, 10)); + rest.push(...bytes); + }); + const der = Buffer.from([first, ...rest]); + return Buffer.from([ANS1_TAG_OID, der.length, ...der]); +} +function encodeVariableLengthInteger(value) { + const bytes = []; + let mask = 0x00; + while (value > 0) { + bytes.unshift((value & 0x7f) | mask); + value >>= 7; + mask = 0x80; + } + return bytes; +} diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/utils/types.js b/node_modules/pacote/node_modules/@tufjs/models/dist/utils/types.js new file mode 100644 index 0000000000000..c8ad2e549bdc6 --- /dev/null +++ b/node_modules/pacote/node_modules/@tufjs/models/dist/utils/types.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/utils/verify.js b/node_modules/pacote/node_modules/@tufjs/models/dist/utils/verify.js new file mode 100644 
index 0000000000000..8232b6f6a97ab --- /dev/null +++ b/node_modules/pacote/node_modules/@tufjs/models/dist/utils/verify.js @@ -0,0 +1,13 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifySignature = void 0; +const canonical_json_1 = require("@tufjs/canonical-json"); +const crypto_1 = __importDefault(require("crypto")); +const verifySignature = (metaDataSignedData, key, signature) => { + const canonicalData = Buffer.from((0, canonical_json_1.canonicalize)(metaDataSignedData)); + return crypto_1.default.verify(undefined, canonicalData, key, Buffer.from(signature, 'hex')); +}; +exports.verifySignature = verifySignature; diff --git a/node_modules/pacote/node_modules/@tufjs/models/package.json b/node_modules/pacote/node_modules/@tufjs/models/package.json new file mode 100644 index 0000000000000..8e5132ddf1079 --- /dev/null +++ b/node_modules/pacote/node_modules/@tufjs/models/package.json @@ -0,0 +1,37 @@ +{ + "name": "@tufjs/models", + "version": "3.0.1", + "description": "TUF metadata models", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "files": [ + "dist" + ], + "scripts": { + "build": "tsc --build", + "clean": "rm -rf dist && rm tsconfig.tsbuildinfo", + "test": "jest" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/theupdateframework/tuf-js.git" + }, + "keywords": [ + "tuf", + "security", + "update" + ], + "author": "bdehamer@github.com", + "license": "MIT", + "bugs": { + "url": "https://github.com/theupdateframework/tuf-js/issues" + }, + "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/models#readme", + "dependencies": { + "@tufjs/canonical-json": "2.0.0", + "minimatch": "^9.0.5" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } +} diff --git a/node_modules/pacote/node_modules/sigstore/LICENSE b/node_modules/pacote/node_modules/sigstore/LICENSE new file mode 100644 index 0000000000000..e9e7c1679a09d --- /dev/null +++ b/node_modules/pacote/node_modules/sigstore/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 The Sigstore Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/pacote/node_modules/sigstore/dist/config.js b/node_modules/pacote/node_modules/sigstore/dist/config.js new file mode 100644 index 0000000000000..e8b2392f97f23 --- /dev/null +++ b/node_modules/pacote/node_modules/sigstore/dist/config.js @@ -0,0 +1,120 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DEFAULT_TIMEOUT = exports.DEFAULT_RETRY = void 0; +exports.createBundleBuilder = createBundleBuilder; +exports.createKeyFinder = createKeyFinder; +exports.createVerificationPolicy = createVerificationPolicy; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const core_1 = require("@sigstore/core"); +const sign_1 = require("@sigstore/sign"); +const verify_1 = require("@sigstore/verify"); +exports.DEFAULT_RETRY = { retries: 2 }; +exports.DEFAULT_TIMEOUT = 5000; +function createBundleBuilder(bundleType, options) { + const bundlerOptions = { + signer: initSigner(options), + witnesses: initWitnesses(options), + }; + switch (bundleType) { + case 'messageSignature': + return new sign_1.MessageSignatureBundleBuilder(bundlerOptions); + case 'dsseEnvelope': + return new sign_1.DSSEBundleBuilder({ + ...bundlerOptions, + certificateChain: options.legacyCompatibility, + }); + } +} +// Translates the public KeySelector type into the KeyFinderFunc type needed by +// the verifier. +function createKeyFinder(keySelector) { + return (hint) => { + const key = keySelector(hint); + if (!key) { + throw new verify_1.VerificationError({ + code: 'PUBLIC_KEY_ERROR', + message: `key not found: ${hint}`, + }); + } + return { + publicKey: core_1.crypto.createPublicKey(key), + validFor: () => true, + }; + }; +} +function createVerificationPolicy(options) { + const policy = {}; + const san = options.certificateIdentityEmail || options.certificateIdentityURI; + if (san) { + policy.subjectAlternativeName = san; + } + if (options.certificateIssuer) { + policy.extensions = { issuer: options.certificateIssuer }; + } + return policy; +} +// Instantiate the FulcioSigner based on the supplied options. +function initSigner(options) { + return new sign_1.FulcioSigner({ + fulcioBaseURL: options.fulcioURL, + identityProvider: options.identityProvider || initIdentityProvider(options), + retry: options.retry ?? exports.DEFAULT_RETRY, + timeout: options.timeout ?? exports.DEFAULT_TIMEOUT, + }); +} +// Instantiate an identity provider based on the supplied options. If an +// explicit identity token is provided, use that. Otherwise, use the CI +// context provider. +function initIdentityProvider(options) { + const token = options.identityToken; + if (token) { + /* istanbul ignore next */ + return { getToken: () => Promise.resolve(token) }; + } + else { + return new sign_1.CIContextProvider('sigstore'); + } +} +// Instantiate a collection of witnesses based on the supplied options. +function initWitnesses(options) { + const witnesses = []; + if (isRekorEnabled(options)) { + witnesses.push(new sign_1.RekorWitness({ + rekorBaseURL: options.rekorURL, + entryType: options.legacyCompatibility ? 'intoto' : 'dsse', + fetchOnConflict: false, + retry: options.retry ?? exports.DEFAULT_RETRY, + timeout: options.timeout ?? exports.DEFAULT_TIMEOUT, + })); + } + if (isTSAEnabled(options)) { + witnesses.push(new sign_1.TSAWitness({ + tsaBaseURL: options.tsaServerURL, + retry: options.retry ?? exports.DEFAULT_RETRY, + timeout: options.timeout ?? 
exports.DEFAULT_TIMEOUT, + })); + } + return witnesses; +} +// Type assertion to ensure that Rekor is enabled +function isRekorEnabled(options) { + return options.tlogUpload !== false; +} +// Type assertion to ensure that TSA is enabled +function isTSAEnabled(options) { + return options.tsaServerURL !== undefined; +} diff --git a/node_modules/pacote/node_modules/sigstore/dist/index.js b/node_modules/pacote/node_modules/sigstore/dist/index.js new file mode 100644 index 0000000000000..7f6a5cf86bbfc --- /dev/null +++ b/node_modules/pacote/node_modules/sigstore/dist/index.js @@ -0,0 +1,34 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verify = exports.sign = exports.createVerifier = exports.attest = exports.VerificationError = exports.PolicyError = exports.TUFError = exports.InternalError = exports.DEFAULT_REKOR_URL = exports.DEFAULT_FULCIO_URL = exports.ValidationError = void 0; +/* +Copyright 2022 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +var bundle_1 = require("@sigstore/bundle"); +Object.defineProperty(exports, "ValidationError", { enumerable: true, get: function () { return bundle_1.ValidationError; } }); +var sign_1 = require("@sigstore/sign"); +Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return sign_1.DEFAULT_FULCIO_URL; } }); +Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return sign_1.DEFAULT_REKOR_URL; } }); +Object.defineProperty(exports, "InternalError", { enumerable: true, get: function () { return sign_1.InternalError; } }); +var tuf_1 = require("@sigstore/tuf"); +Object.defineProperty(exports, "TUFError", { enumerable: true, get: function () { return tuf_1.TUFError; } }); +var verify_1 = require("@sigstore/verify"); +Object.defineProperty(exports, "PolicyError", { enumerable: true, get: function () { return verify_1.PolicyError; } }); +Object.defineProperty(exports, "VerificationError", { enumerable: true, get: function () { return verify_1.VerificationError; } }); +var sigstore_1 = require("./sigstore"); +Object.defineProperty(exports, "attest", { enumerable: true, get: function () { return sigstore_1.attest; } }); +Object.defineProperty(exports, "createVerifier", { enumerable: true, get: function () { return sigstore_1.createVerifier; } }); +Object.defineProperty(exports, "sign", { enumerable: true, get: function () { return sigstore_1.sign; } }); +Object.defineProperty(exports, "verify", { enumerable: true, get: function () { return sigstore_1.verify; } }); diff --git a/node_modules/pacote/node_modules/sigstore/dist/sigstore.js b/node_modules/pacote/node_modules/sigstore/dist/sigstore.js new file mode 100644 index 0000000000000..2b37ef46b7438 --- /dev/null +++ b/node_modules/pacote/node_modules/sigstore/dist/sigstore.js @@ -0,0 +1,102 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.sign = sign; +exports.attest = attest; +exports.verify = verify; +exports.createVerifier = createVerifier; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const bundle_1 = require("@sigstore/bundle"); +const tuf = __importStar(require("@sigstore/tuf")); +const verify_1 = require("@sigstore/verify"); +const config = __importStar(require("./config")); +async function sign(payload, +/* istanbul ignore next */ +options = {}) { + const bundler = config.createBundleBuilder('messageSignature', options); + const bundle = await bundler.create({ data: payload }); + return (0, bundle_1.bundleToJSON)(bundle); +} +async function attest(payload, payloadType, +/* istanbul ignore next */ +options = {}) { + const bundler = config.createBundleBuilder('dsseEnvelope', options); + const bundle = await bundler.create({ data: payload, type: payloadType }); + return (0, bundle_1.bundleToJSON)(bundle); +} +async function verify(bundle, dataOrOptions, options) { + let data; + if (Buffer.isBuffer(dataOrOptions)) { + data = dataOrOptions; + } + else { + options = dataOrOptions; + } + return createVerifier(options).then((verifier) => verifier.verify(bundle, data)); +} +async function createVerifier( +/* istanbul ignore next */ +options = {}) { + const trustedRoot = await tuf.getTrustedRoot({ + mirrorURL: options.tufMirrorURL, + rootPath: options.tufRootPath, + cachePath: options.tufCachePath, + forceCache: options.tufForceCache, + retry: options.retry ?? config.DEFAULT_RETRY, + timeout: options.timeout ?? config.DEFAULT_TIMEOUT, + }); + const keyFinder = options.keySelector + ? 
config.createKeyFinder(options.keySelector) + : undefined; + const trustMaterial = (0, verify_1.toTrustMaterial)(trustedRoot, keyFinder); + const verifierOptions = { + ctlogThreshold: options.ctLogThreshold, + tlogThreshold: options.tlogThreshold, + }; + const verifier = new verify_1.Verifier(trustMaterial, verifierOptions); + const policy = config.createVerificationPolicy(options); + return { + verify: (bundle, payload) => { + const deserializedBundle = (0, bundle_1.bundleFromJSON)(bundle); + const signedEntity = (0, verify_1.toSignedEntity)(deserializedBundle, payload); + verifier.verify(signedEntity, policy); + return; + }, + }; +} diff --git a/node_modules/pacote/node_modules/sigstore/package.json b/node_modules/pacote/node_modules/sigstore/package.json new file mode 100644 index 0000000000000..0f798a263657b --- /dev/null +++ b/node_modules/pacote/node_modules/sigstore/package.json @@ -0,0 +1,47 @@ +{ + "name": "sigstore", + "version": "3.0.0", + "description": "code-signing for npm packages", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "clean": "shx rm -rf dist *.tsbuildinfo", + "build": "tsc --build", + "test": "jest" + }, + "files": [ + "dist", + "store" + ], + "author": "bdehamer@github.com", + "license": "Apache-2.0", + "repository": { + "type": "git", + "url": "git+https://github.com/sigstore/sigstore-js.git" + }, + "bugs": { + "url": "https://github.com/sigstore/sigstore-js/issues" + }, + "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/client#readme", + "publishConfig": { + "provenance": true + }, + "devDependencies": { + "@sigstore/rekor-types": "^3.0.0", + "@sigstore/jest": "^0.0.0", + "@sigstore/mock": "^0.8.0", + "@tufjs/repo-mock": "^3.0.1", + "@types/make-fetch-happen": "^10.0.4" + }, + "dependencies": { + "@sigstore/bundle": "^3.0.0", + "@sigstore/core": "^2.0.0", + "@sigstore/protobuf-specs": "^0.3.2", + "@sigstore/sign": "^3.0.0", + "@sigstore/tuf": "^3.0.0", + "@sigstore/verify": "^2.0.0" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } +} diff --git a/node_modules/pacote/node_modules/tuf-js/LICENSE b/node_modules/pacote/node_modules/tuf-js/LICENSE new file mode 100644 index 0000000000000..420700f5d3765 --- /dev/null +++ b/node_modules/pacote/node_modules/tuf-js/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 GitHub and the TUF Contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/pacote/node_modules/tuf-js/dist/config.js b/node_modules/pacote/node_modules/tuf-js/dist/config.js new file mode 100644 index 0000000000000..c66d76af86b98 --- /dev/null +++ b/node_modules/pacote/node_modules/tuf-js/dist/config.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultConfig = void 0; +exports.defaultConfig = { + maxRootRotations: 256, + maxDelegations: 32, + rootMaxLength: 512000, //bytes + timestampMaxLength: 16384, // bytes + snapshotMaxLength: 2000000, // bytes + targetsMaxLength: 5000000, // bytes + prefixTargetsWithHash: true, + fetchTimeout: 100000, // milliseconds + fetchRetries: undefined, + fetchRetry: 2, +}; diff --git a/node_modules/pacote/node_modules/tuf-js/dist/error.js b/node_modules/pacote/node_modules/tuf-js/dist/error.js new file mode 100644 index 0000000000000..f4b10fa202895 --- /dev/null +++ b/node_modules/pacote/node_modules/tuf-js/dist/error.js @@ -0,0 +1,48 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DownloadHTTPError = exports.DownloadLengthMismatchError = exports.DownloadError = exports.ExpiredMetadataError = exports.EqualVersionError = exports.BadVersionError = exports.RepositoryError = exports.PersistError = exports.RuntimeError = exports.ValueError = void 0; +// An error about insufficient values +class ValueError extends Error { +} +exports.ValueError = ValueError; +class RuntimeError extends Error { +} +exports.RuntimeError = RuntimeError; +class PersistError extends Error { +} +exports.PersistError = PersistError; +// An error with a repository's state, such as a missing file. +// It covers all exceptions that come from the repository side when +// looking from the perspective of users of metadata API or ngclient. +class RepositoryError extends Error { +} +exports.RepositoryError = RepositoryError; +// An error for metadata that contains an invalid version number. +class BadVersionError extends RepositoryError { +} +exports.BadVersionError = BadVersionError; +// An error for metadata containing a previously verified version number. +class EqualVersionError extends BadVersionError { +} +exports.EqualVersionError = EqualVersionError; +// Indicate that a TUF Metadata file has expired. +class ExpiredMetadataError extends RepositoryError { +} +exports.ExpiredMetadataError = ExpiredMetadataError; +//----- Download Errors ------------------------------------------------------- +// An error occurred while attempting to download a file. +class DownloadError extends Error { +} +exports.DownloadError = DownloadError; +// Indicate that a mismatch of lengths was seen while downloading a file +class DownloadLengthMismatchError extends DownloadError { +} +exports.DownloadLengthMismatchError = DownloadLengthMismatchError; +// Returned by FetcherInterface implementations for HTTP errors. +class DownloadHTTPError extends DownloadError { + constructor(message, statusCode) { + super(message); + this.statusCode = statusCode; + } +} +exports.DownloadHTTPError = DownloadHTTPError; diff --git a/node_modules/pacote/node_modules/tuf-js/dist/fetcher.js b/node_modules/pacote/node_modules/tuf-js/dist/fetcher.js new file mode 100644 index 0000000000000..f966ce1bb0cdc --- /dev/null +++ b/node_modules/pacote/node_modules/tuf-js/dist/fetcher.js @@ -0,0 +1,84 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DefaultFetcher = exports.BaseFetcher = void 0; +const debug_1 = __importDefault(require("debug")); +const fs_1 = __importDefault(require("fs")); +const make_fetch_happen_1 = __importDefault(require("make-fetch-happen")); +const util_1 = __importDefault(require("util")); +const error_1 = require("./error"); +const tmpfile_1 = require("./utils/tmpfile"); +const log = (0, debug_1.default)('tuf:fetch'); +class BaseFetcher { + // Download file from given URL. The file is downloaded to a temporary + // location and then passed to the given handler. The handler is responsible + // for moving the file to its final location. The temporary file is deleted + // after the handler returns. + async downloadFile(url, maxLength, handler) { + return (0, tmpfile_1.withTempFile)(async (tmpFile) => { + const reader = await this.fetch(url); + let numberOfBytesReceived = 0; + const fileStream = fs_1.default.createWriteStream(tmpFile); + // Read the stream a chunk at a time so that we can check + // the length of the file as we go + try { + for await (const chunk of reader) { + const bufferChunk = Buffer.from(chunk); + numberOfBytesReceived += bufferChunk.length; + if (numberOfBytesReceived > maxLength) { + throw new error_1.DownloadLengthMismatchError('Max length reached'); + } + await writeBufferToStream(fileStream, bufferChunk); + } + } + finally { + // Make sure we always close the stream + await util_1.default.promisify(fileStream.close).bind(fileStream)(); + } + return handler(tmpFile); + }); + } + // Download bytes from given URL. + async downloadBytes(url, maxLength) { + return this.downloadFile(url, maxLength, async (file) => { + const stream = fs_1.default.createReadStream(file); + const chunks = []; + for await (const chunk of stream) { + chunks.push(chunk); + } + return Buffer.concat(chunks); + }); + } +} +exports.BaseFetcher = BaseFetcher; +class DefaultFetcher extends BaseFetcher { + constructor(options = {}) { + super(); + this.timeout = options.timeout; + this.retry = options.retry; + } + async fetch(url) { + log('GET %s', url); + const response = await (0, make_fetch_happen_1.default)(url, { + timeout: this.timeout, + retry: this.retry, + }); + if (!response.ok || !response?.body) { + throw new error_1.DownloadHTTPError('Failed to download', response.status); + } + return response.body; + } +} +exports.DefaultFetcher = DefaultFetcher; +const writeBufferToStream = async (stream, buffer) => { + return new Promise((resolve, reject) => { + stream.write(buffer, (err) => { + if (err) { + reject(err); + } + resolve(true); + }); + }); +}; diff --git a/node_modules/pacote/node_modules/tuf-js/dist/index.js b/node_modules/pacote/node_modules/tuf-js/dist/index.js new file mode 100644 index 0000000000000..5a83b91f355d8 --- /dev/null +++ b/node_modules/pacote/node_modules/tuf-js/dist/index.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Updater = exports.BaseFetcher = exports.TargetFile = void 0; +var models_1 = require("@tufjs/models"); +Object.defineProperty(exports, "TargetFile", { enumerable: true, get: function () { return models_1.TargetFile; } }); +var fetcher_1 = require("./fetcher"); +Object.defineProperty(exports, "BaseFetcher", { enumerable: true, get: function () { return fetcher_1.BaseFetcher; } }); +var updater_1 = require("./updater"); +Object.defineProperty(exports, "Updater", { enumerable: true, get: function () { return 
updater_1.Updater; } }); diff --git a/node_modules/pacote/node_modules/tuf-js/dist/store.js b/node_modules/pacote/node_modules/tuf-js/dist/store.js new file mode 100644 index 0000000000000..8567336108709 --- /dev/null +++ b/node_modules/pacote/node_modules/tuf-js/dist/store.js @@ -0,0 +1,208 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TrustedMetadataStore = void 0; +const models_1 = require("@tufjs/models"); +const error_1 = require("./error"); +class TrustedMetadataStore { + constructor(rootData) { + this.trustedSet = {}; + // Client workflow 5.1: record fixed update start time + this.referenceTime = new Date(); + // Client workflow 5.2: load trusted root metadata + this.loadTrustedRoot(rootData); + } + get root() { + if (!this.trustedSet.root) { + throw new ReferenceError('No trusted root metadata'); + } + return this.trustedSet.root; + } + get timestamp() { + return this.trustedSet.timestamp; + } + get snapshot() { + return this.trustedSet.snapshot; + } + get targets() { + return this.trustedSet.targets; + } + getRole(name) { + return this.trustedSet[name]; + } + updateRoot(bytesBuffer) { + const data = JSON.parse(bytesBuffer.toString('utf8')); + const newRoot = models_1.Metadata.fromJSON(models_1.MetadataKind.Root, data); + if (newRoot.signed.type != models_1.MetadataKind.Root) { + throw new error_1.RepositoryError(`Expected 'root', got ${newRoot.signed.type}`); + } + // Client workflow 5.4: check for arbitrary software attack + this.root.verifyDelegate(models_1.MetadataKind.Root, newRoot); + // Client workflow 5.5: check for rollback attack + if (newRoot.signed.version != this.root.signed.version + 1) { + throw new error_1.BadVersionError(`Expected version ${this.root.signed.version + 1}, got ${newRoot.signed.version}`); + } + // Check that new root is signed by self + newRoot.verifyDelegate(models_1.MetadataKind.Root, newRoot); + // Client workflow 5.7: set new root as trusted root + this.trustedSet.root = newRoot; + return newRoot; + } + updateTimestamp(bytesBuffer) { + if (this.snapshot) { + throw new error_1.RuntimeError('Cannot update timestamp after snapshot'); + } + if (this.root.signed.isExpired(this.referenceTime)) { + throw new error_1.ExpiredMetadataError('Final root.json is expired'); + } + const data = JSON.parse(bytesBuffer.toString('utf8')); + const newTimestamp = models_1.Metadata.fromJSON(models_1.MetadataKind.Timestamp, data); + if (newTimestamp.signed.type != models_1.MetadataKind.Timestamp) { + throw new error_1.RepositoryError(`Expected 'timestamp', got ${newTimestamp.signed.type}`); + } + // Client workflow 5.4.2: check for arbitrary software attack + this.root.verifyDelegate(models_1.MetadataKind.Timestamp, newTimestamp); + if (this.timestamp) { + // Prevent rolling back timestamp version + // Client workflow 5.4.3.1: check for rollback attack + if (newTimestamp.signed.version < this.timestamp.signed.version) { + throw new error_1.BadVersionError(`New timestamp version ${newTimestamp.signed.version} is less than current version ${this.timestamp.signed.version}`); + } + // Keep using old timestamp if versions are equal. 
+ if (newTimestamp.signed.version === this.timestamp.signed.version) { + throw new error_1.EqualVersionError(`New timestamp version ${newTimestamp.signed.version} is equal to current version ${this.timestamp.signed.version}`); + } + // Prevent rolling back snapshot version + // Client workflow 5.4.3.2: check for rollback attack + const snapshotMeta = this.timestamp.signed.snapshotMeta; + const newSnapshotMeta = newTimestamp.signed.snapshotMeta; + if (newSnapshotMeta.version < snapshotMeta.version) { + throw new error_1.BadVersionError(`New snapshot version ${newSnapshotMeta.version} is less than current version ${snapshotMeta.version}`); + } + } + // expiry not checked to allow old timestamp to be used for rollback + // protection of new timestamp: expiry is checked in update_snapshot + this.trustedSet.timestamp = newTimestamp; + // Client workflow 5.4.4: check for freeze attack + this.checkFinalTimestamp(); + return newTimestamp; + } + updateSnapshot(bytesBuffer, trusted = false) { + if (!this.timestamp) { + throw new error_1.RuntimeError('Cannot update snapshot before timestamp'); + } + if (this.targets) { + throw new error_1.RuntimeError('Cannot update snapshot after targets'); + } + // Snapshot cannot be loaded if final timestamp is expired + this.checkFinalTimestamp(); + const snapshotMeta = this.timestamp.signed.snapshotMeta; + // Verify non-trusted data against the hashes in timestamp, if any. + // Trusted snapshot data has already been verified once. + // Client workflow 5.5.2: check against timestamp role's snaphsot hash + if (!trusted) { + snapshotMeta.verify(bytesBuffer); + } + const data = JSON.parse(bytesBuffer.toString('utf8')); + const newSnapshot = models_1.Metadata.fromJSON(models_1.MetadataKind.Snapshot, data); + if (newSnapshot.signed.type != models_1.MetadataKind.Snapshot) { + throw new error_1.RepositoryError(`Expected 'snapshot', got ${newSnapshot.signed.type}`); + } + // Client workflow 5.5.3: check for arbitrary software attack + this.root.verifyDelegate(models_1.MetadataKind.Snapshot, newSnapshot); + // version check against meta version (5.5.4) is deferred to allow old + // snapshot to be used in rollback protection + // Client workflow 5.5.5: check for rollback attack + if (this.snapshot) { + Object.entries(this.snapshot.signed.meta).forEach(([fileName, fileInfo]) => { + const newFileInfo = newSnapshot.signed.meta[fileName]; + if (!newFileInfo) { + throw new error_1.RepositoryError(`Missing file ${fileName} in new snapshot`); + } + if (newFileInfo.version < fileInfo.version) { + throw new error_1.BadVersionError(`New version ${newFileInfo.version} of ${fileName} is less than current version ${fileInfo.version}`); + } + }); + } + this.trustedSet.snapshot = newSnapshot; + // snapshot is loaded, but we raise if it's not valid _final_ snapshot + // Client workflow 5.5.4 & 5.5.6 + this.checkFinalSnapsnot(); + return newSnapshot; + } + updateDelegatedTargets(bytesBuffer, roleName, delegatorName) { + if (!this.snapshot) { + throw new error_1.RuntimeError('Cannot update delegated targets before snapshot'); + } + // Targets cannot be loaded if final snapshot is expired or its version + // does not match meta version in timestamp. 
+ this.checkFinalSnapsnot(); + const delegator = this.trustedSet[delegatorName]; + if (!delegator) { + throw new error_1.RuntimeError(`No trusted ${delegatorName} metadata`); + } + // Extract metadata for the delegated role from snapshot + const meta = this.snapshot.signed.meta?.[`${roleName}.json`]; + if (!meta) { + throw new error_1.RepositoryError(`Missing ${roleName}.json in snapshot`); + } + // Client workflow 5.6.2: check against snapshot role's targets hash + meta.verify(bytesBuffer); + const data = JSON.parse(bytesBuffer.toString('utf8')); + const newDelegate = models_1.Metadata.fromJSON(models_1.MetadataKind.Targets, data); + if (newDelegate.signed.type != models_1.MetadataKind.Targets) { + throw new error_1.RepositoryError(`Expected 'targets', got ${newDelegate.signed.type}`); + } + // Client workflow 5.6.3: check for arbitrary software attack + delegator.verifyDelegate(roleName, newDelegate); + // Client workflow 5.6.4: Check against snapshot role’s targets version + const version = newDelegate.signed.version; + if (version != meta.version) { + throw new error_1.BadVersionError(`Version ${version} of ${roleName} does not match snapshot version ${meta.version}`); + } + // Client workflow 5.6.5: check for a freeze attack + if (newDelegate.signed.isExpired(this.referenceTime)) { + throw new error_1.ExpiredMetadataError(`${roleName}.json is expired`); + } + this.trustedSet[roleName] = newDelegate; + } + // Verifies and loads data as trusted root metadata. + // Note that an expired initial root is still considered valid. + loadTrustedRoot(bytesBuffer) { + const data = JSON.parse(bytesBuffer.toString('utf8')); + const root = models_1.Metadata.fromJSON(models_1.MetadataKind.Root, data); + if (root.signed.type != models_1.MetadataKind.Root) { + throw new error_1.RepositoryError(`Expected 'root', got ${root.signed.type}`); + } + root.verifyDelegate(models_1.MetadataKind.Root, root); + this.trustedSet['root'] = root; + } + checkFinalTimestamp() { + // Timestamp MUST be loaded + if (!this.timestamp) { + throw new ReferenceError('No trusted timestamp metadata'); + } + // Client workflow 5.4.4: check for freeze attack + if (this.timestamp.signed.isExpired(this.referenceTime)) { + throw new error_1.ExpiredMetadataError('Final timestamp.json is expired'); + } + } + checkFinalSnapsnot() { + // Snapshot and timestamp MUST be loaded + if (!this.snapshot) { + throw new ReferenceError('No trusted snapshot metadata'); + } + if (!this.timestamp) { + throw new ReferenceError('No trusted timestamp metadata'); + } + // Client workflow 5.5.6: check for freeze attack + if (this.snapshot.signed.isExpired(this.referenceTime)) { + throw new error_1.ExpiredMetadataError('snapshot.json is expired'); + } + // Client workflow 5.5.4: check against timestamp role’s snapshot version + const snapshotMeta = this.timestamp.signed.snapshotMeta; + if (this.snapshot.signed.version !== snapshotMeta.version) { + throw new error_1.BadVersionError("Snapshot version doesn't match timestamp"); + } + } +} +exports.TrustedMetadataStore = TrustedMetadataStore; diff --git a/node_modules/pacote/node_modules/tuf-js/dist/updater.js b/node_modules/pacote/node_modules/tuf-js/dist/updater.js new file mode 100644 index 0000000000000..8d5eb4428f044 --- /dev/null +++ b/node_modules/pacote/node_modules/tuf-js/dist/updater.js @@ -0,0 +1,350 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Updater = void 0; +const models_1 = require("@tufjs/models"); +const debug_1 = __importDefault(require("debug")); +const fs = __importStar(require("fs")); +const path = __importStar(require("path")); +const config_1 = require("./config"); +const error_1 = require("./error"); +const fetcher_1 = require("./fetcher"); +const store_1 = require("./store"); +const url = __importStar(require("./utils/url")); +const log = (0, debug_1.default)('tuf:cache'); +class Updater { + constructor(options) { + const { metadataDir, metadataBaseUrl, targetDir, targetBaseUrl, fetcher, config, } = options; + this.dir = metadataDir; + this.metadataBaseUrl = metadataBaseUrl; + this.targetDir = targetDir; + this.targetBaseUrl = targetBaseUrl; + this.forceCache = options.forceCache ?? false; + const data = this.loadLocalMetadata(models_1.MetadataKind.Root); + this.trustedSet = new store_1.TrustedMetadataStore(data); + this.config = { ...config_1.defaultConfig, ...config }; + this.fetcher = + fetcher || + new fetcher_1.DefaultFetcher({ + timeout: this.config.fetchTimeout, + retry: this.config.fetchRetries ?? this.config.fetchRetry, + }); + } + // refresh and load the metadata before downloading the target + // refresh should be called once after the client is initialized + async refresh() { + // If forceCache is true, try to load the timestamp from local storage + // without fetching it from the remote. Otherwise, load the root and + // timestamp from the remote per the TUF spec. + if (this.forceCache) { + // If anything fails, load the root and timestamp from the remote. This + // should cover any situation where the local metadata is corrupted or + // expired. + try { + await this.loadTimestamp({ checkRemote: false }); + } + catch (error) { + await this.loadRoot(); + await this.loadTimestamp(); + } + } + else { + await this.loadRoot(); + await this.loadTimestamp(); + } + await this.loadSnapshot(); + await this.loadTargets(models_1.MetadataKind.Targets, models_1.MetadataKind.Root); + } + // Returns the TargetFile instance with information for the given target path. + // + // Implicitly calls refresh if it hasn't already been called. 
+ async getTargetInfo(targetPath) { + if (!this.trustedSet.targets) { + await this.refresh(); + } + return this.preorderDepthFirstWalk(targetPath); + } + async downloadTarget(targetInfo, filePath, targetBaseUrl) { + const targetPath = filePath || this.generateTargetPath(targetInfo); + if (!targetBaseUrl) { + if (!this.targetBaseUrl) { + throw new error_1.ValueError('Target base URL not set'); + } + targetBaseUrl = this.targetBaseUrl; + } + let targetFilePath = targetInfo.path; + const consistentSnapshot = this.trustedSet.root.signed.consistentSnapshot; + if (consistentSnapshot && this.config.prefixTargetsWithHash) { + const hashes = Object.values(targetInfo.hashes); + const { dir, base } = path.parse(targetFilePath); + const filename = `${hashes[0]}.${base}`; + targetFilePath = dir ? `${dir}/${filename}` : filename; + } + const targetUrl = url.join(targetBaseUrl, targetFilePath); + // Client workflow 5.7.3: download target file + await this.fetcher.downloadFile(targetUrl, targetInfo.length, async (fileName) => { + // Verify hashes and length of downloaded file + await targetInfo.verify(fs.createReadStream(fileName)); + // Copy file to target path + log('WRITE %s', targetPath); + fs.copyFileSync(fileName, targetPath); + }); + return targetPath; + } + async findCachedTarget(targetInfo, filePath) { + if (!filePath) { + filePath = this.generateTargetPath(targetInfo); + } + try { + if (fs.existsSync(filePath)) { + await targetInfo.verify(fs.createReadStream(filePath)); + return filePath; + } + } + catch (error) { + return; // File not found + } + return; // File not found + } + loadLocalMetadata(fileName) { + const filePath = path.join(this.dir, `${fileName}.json`); + log('READ %s', filePath); + return fs.readFileSync(filePath); + } + // Sequentially load and persist on local disk every newer root metadata + // version available on the remote. + // Client workflow 5.3: update root role + async loadRoot() { + // Client workflow 5.3.2: version of trusted root metadata file + const rootVersion = this.trustedSet.root.signed.version; + const lowerBound = rootVersion + 1; + const upperBound = lowerBound + this.config.maxRootRotations; + for (let version = lowerBound; version < upperBound; version++) { + const rootUrl = url.join(this.metadataBaseUrl, `${version}.root.json`); + try { + // Client workflow 5.3.3: download new root metadata file + const bytesData = await this.fetcher.downloadBytes(rootUrl, this.config.rootMaxLength); + // Client workflow 5.3.4 - 5.4.7 + this.trustedSet.updateRoot(bytesData); + // Client workflow 5.3.8: persist root metadata file + this.persistMetadata(models_1.MetadataKind.Root, bytesData); + } + catch (error) { + if (error instanceof error_1.DownloadHTTPError) { + // 404/403 means current root is newest available + if ([403, 404].includes(error.statusCode)) { + break; + } + } + throw error; + } + } + } + // Load local and remote timestamp metadata. + // Client workflow 5.4: update timestamp role + async loadTimestamp({ checkRemote } = { checkRemote: true }) { + // Load local and remote timestamp metadata + try { + const data = this.loadLocalMetadata(models_1.MetadataKind.Timestamp); + this.trustedSet.updateTimestamp(data); + // If checkRemote is disabled, return here to avoid fetching the remote + // timestamp metadata. 
+ if (!checkRemote) { + return; + } + } + catch (error) { + // continue + } + //Load from remote (whether local load succeeded or not) + const timestampUrl = url.join(this.metadataBaseUrl, 'timestamp.json'); + // Client workflow 5.4.1: download timestamp metadata file + const bytesData = await this.fetcher.downloadBytes(timestampUrl, this.config.timestampMaxLength); + try { + // Client workflow 5.4.2 - 5.4.4 + this.trustedSet.updateTimestamp(bytesData); + } + catch (error) { + // If new timestamp version is same as current, discardd the new one. + // This is normal and should NOT raise an error. + if (error instanceof error_1.EqualVersionError) { + return; + } + // Re-raise any other error + throw error; + } + // Client workflow 5.4.5: persist timestamp metadata + this.persistMetadata(models_1.MetadataKind.Timestamp, bytesData); + } + // Load local and remote snapshot metadata. + // Client workflow 5.5: update snapshot role + async loadSnapshot() { + //Load local (and if needed remote) snapshot metadata + try { + const data = this.loadLocalMetadata(models_1.MetadataKind.Snapshot); + this.trustedSet.updateSnapshot(data, true); + } + catch (error) { + if (!this.trustedSet.timestamp) { + throw new ReferenceError('No timestamp metadata'); + } + const snapshotMeta = this.trustedSet.timestamp.signed.snapshotMeta; + const maxLength = snapshotMeta.length || this.config.snapshotMaxLength; + const version = this.trustedSet.root.signed.consistentSnapshot + ? snapshotMeta.version + : undefined; + const snapshotUrl = url.join(this.metadataBaseUrl, version ? `${version}.snapshot.json` : 'snapshot.json'); + try { + // Client workflow 5.5.1: download snapshot metadata file + const bytesData = await this.fetcher.downloadBytes(snapshotUrl, maxLength); + // Client workflow 5.5.2 - 5.5.6 + this.trustedSet.updateSnapshot(bytesData); + // Client workflow 5.5.7: persist snapshot metadata file + this.persistMetadata(models_1.MetadataKind.Snapshot, bytesData); + } + catch (error) { + throw new error_1.RuntimeError(`Unable to load snapshot metadata error ${error}`); + } + } + } + // Load local and remote targets metadata. + // Client workflow 5.6: update targets role + async loadTargets(role, parentRole) { + if (this.trustedSet.getRole(role)) { + return this.trustedSet.getRole(role); + } + try { + const buffer = this.loadLocalMetadata(role); + this.trustedSet.updateDelegatedTargets(buffer, role, parentRole); + } + catch (error) { + // Local 'role' does not exist or is invalid: update from remote + if (!this.trustedSet.snapshot) { + throw new ReferenceError('No snapshot metadata'); + } + const metaInfo = this.trustedSet.snapshot.signed.meta[`${role}.json`]; + // TODO: use length for fetching + const maxLength = metaInfo.length || this.config.targetsMaxLength; + const version = this.trustedSet.root.signed.consistentSnapshot + ? metaInfo.version + : undefined; + const encodedRole = encodeURIComponent(role); + const metadataUrl = url.join(this.metadataBaseUrl, version ? 
`${version}.${encodedRole}.json` : `${encodedRole}.json`); + try { + // Client workflow 5.6.1: download targets metadata file + const bytesData = await this.fetcher.downloadBytes(metadataUrl, maxLength); + // Client workflow 5.6.2 - 5.6.6 + this.trustedSet.updateDelegatedTargets(bytesData, role, parentRole); + // Client workflow 5.6.7: persist targets metadata file + this.persistMetadata(role, bytesData); + } + catch (error) { + throw new error_1.RuntimeError(`Unable to load targets error ${error}`); + } + } + return this.trustedSet.getRole(role); + } + async preorderDepthFirstWalk(targetPath) { + // Interrogates the tree of target delegations in order of appearance + // (which implicitly order trustworthiness), and returns the matching + // target found in the most trusted role. + // List of delegations to be interrogated. A (role, parent role) pair + // is needed to load and verify the delegated targets metadata. + const delegationsToVisit = [ + { + roleName: models_1.MetadataKind.Targets, + parentRoleName: models_1.MetadataKind.Root, + }, + ]; + const visitedRoleNames = new Set(); + // Client workflow 5.6.7: preorder depth-first traversal of the graph of + // target delegations + while (visitedRoleNames.size <= this.config.maxDelegations && + delegationsToVisit.length > 0) { + // Pop the role name from the top of the stack. + const { roleName, parentRoleName } = delegationsToVisit.pop(); + // Skip any visited current role to prevent cycles. + // Client workflow 5.6.7.1: skip already-visited roles + if (visitedRoleNames.has(roleName)) { + continue; + } + // The metadata for 'role_name' must be downloaded/updated before + // its targets, delegations, and child roles can be inspected. + const targets = (await this.loadTargets(roleName, parentRoleName)) + ?.signed; + if (!targets) { + continue; + } + const target = targets.targets?.[targetPath]; + if (target) { + return target; + } + // After preorder check, add current role to set of visited roles. + visitedRoleNames.add(roleName); + if (targets.delegations) { + const childRolesToVisit = []; + // NOTE: This may be a slow operation if there are many delegated roles. 
+ const rolesForTarget = targets.delegations.rolesForTarget(targetPath); + for (const { role: childName, terminating } of rolesForTarget) { + childRolesToVisit.push({ + roleName: childName, + parentRoleName: roleName, + }); + // Client workflow 5.6.7.2.1 + if (terminating) { + delegationsToVisit.splice(0); // empty the array + break; + } + } + childRolesToVisit.reverse(); + delegationsToVisit.push(...childRolesToVisit); + } + } + return; // no matching target found + } + generateTargetPath(targetInfo) { + if (!this.targetDir) { + throw new error_1.ValueError('Target directory not set'); + } + // URL encode target path + const filePath = encodeURIComponent(targetInfo.path); + return path.join(this.targetDir, filePath); + } + persistMetadata(metaDataName, bytesData) { + const encodedName = encodeURIComponent(metaDataName); + try { + const filePath = path.join(this.dir, `${encodedName}.json`); + log('WRITE %s', filePath); + fs.writeFileSync(filePath, bytesData.toString('utf8')); + } + catch (error) { + throw new error_1.PersistError(`Failed to persist metadata ${encodedName} error: ${error}`); + } + } +} +exports.Updater = Updater; diff --git a/node_modules/pacote/node_modules/tuf-js/dist/utils/tmpfile.js b/node_modules/pacote/node_modules/tuf-js/dist/utils/tmpfile.js new file mode 100644 index 0000000000000..923eef6044bcc --- /dev/null +++ b/node_modules/pacote/node_modules/tuf-js/dist/utils/tmpfile.js @@ -0,0 +1,25 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.withTempFile = void 0; +const promises_1 = __importDefault(require("fs/promises")); +const os_1 = __importDefault(require("os")); +const path_1 = __importDefault(require("path")); +// Invokes the given handler with the path to a temporary file. The file +// is deleted after the handler returns. +const withTempFile = async (handler) => withTempDir(async (dir) => handler(path_1.default.join(dir, 'tempfile'))); +exports.withTempFile = withTempFile; +// Invokes the given handler with a temporary directory. The directory is +// deleted after the handler returns. +const withTempDir = async (handler) => { + const tmpDir = await promises_1.default.realpath(os_1.default.tmpdir()); + const dir = await promises_1.default.mkdtemp(tmpDir + path_1.default.sep); + try { + return await handler(dir); + } + finally { + await promises_1.default.rm(dir, { force: true, recursive: true, maxRetries: 3 }); + } +}; diff --git a/node_modules/pacote/node_modules/tuf-js/dist/utils/url.js b/node_modules/pacote/node_modules/tuf-js/dist/utils/url.js new file mode 100644 index 0000000000000..359d1f3ef385b --- /dev/null +++ b/node_modules/pacote/node_modules/tuf-js/dist/utils/url.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.join = join; +const url_1 = require("url"); +function join(base, path) { + return new url_1.URL(ensureTrailingSlash(base) + removeLeadingSlash(path)).toString(); +} +function ensureTrailingSlash(path) { + return path.endsWith('/') ? path : path + '/'; +} +function removeLeadingSlash(path) { + return path.startsWith('/') ? 
path.slice(1) : path; +} diff --git a/node_modules/pacote/node_modules/tuf-js/package.json b/node_modules/pacote/node_modules/tuf-js/package.json new file mode 100644 index 0000000000000..e79a3d45f3f06 --- /dev/null +++ b/node_modules/pacote/node_modules/tuf-js/package.json @@ -0,0 +1,43 @@ +{ + "name": "tuf-js", + "version": "3.0.1", + "description": "JavaScript implementation of The Update Framework (TUF)", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "build": "tsc --build", + "clean": "rm -rf dist && rm tsconfig.tsbuildinfo", + "test": "jest" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/theupdateframework/tuf-js.git" + }, + "files": [ + "dist" + ], + "keywords": [ + "tuf", + "security", + "update" + ], + "author": "bdehamer@github.com", + "license": "MIT", + "bugs": { + "url": "https://github.com/theupdateframework/tuf-js/issues" + }, + "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/client#readme", + "devDependencies": { + "@tufjs/repo-mock": "3.0.1", + "@types/debug": "^4.1.12", + "@types/make-fetch-happen": "^10.0.4" + }, + "dependencies": { + "@tufjs/models": "3.0.1", + "debug": "^4.3.6", + "make-fetch-happen": "^14.0.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } +} diff --git a/node_modules/pacote/package.json b/node_modules/pacote/package.json index 0eb8261af96e0..71c9aa1ce3257 100644 --- a/node_modules/pacote/package.json +++ b/node_modules/pacote/package.json @@ -1,6 +1,6 @@ { "name": "pacote", - "version": "19.0.0", + "version": "19.0.1", "description": "JavaScript package downloader", "author": "GitHub Inc.", "bin": { @@ -59,7 +59,7 @@ "npm-registry-fetch": "^18.0.0", "proc-log": "^5.0.0", "promise-retry": "^2.0.1", - "sigstore": "^2.2.0", + "sigstore": "^3.0.0", "ssri": "^12.0.0", "tar": "^6.1.11" }, diff --git a/package-lock.json b/package-lock.json index 6d84e53ec8e9e..91998e6447da7 100644 --- a/package-lock.json +++ b/package-lock.json @@ -138,7 +138,7 @@ "npm-registry-fetch": "^18.0.2", "npm-user-validate": "^3.0.0", "p-map": "^4.0.0", - "pacote": "^19.0.0", + "pacote": "^19.0.1", "parse-conflict-json": "^4.0.0", "proc-log": "^5.0.0", "qrcode-terminal": "^0.12.0", @@ -3479,7 +3479,6 @@ "version": "2.3.2", "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-2.3.2.tgz", "integrity": "sha512-wueKWDk70QixNLB363yHc2D2ItTgYiMTdPwK8D9dKQMR3ZQ0c35IxP5xnwQ8cNLoCgCRcHf14kE+CLIvNX1zmA==", - "inBundle": true, "license": "Apache-2.0", "dependencies": { "@sigstore/protobuf-specs": "^0.3.2" @@ -3492,7 +3491,6 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/@sigstore/core/-/core-1.1.0.tgz", "integrity": "sha512-JzBqdVIyqm2FRQCulY6nbQzMpJJpSiJ8XXWMhtOX9eKgaXXpfNOF53lzQEjIydlStnd/eFtuC1dW4VYdD93oRg==", - "inBundle": true, "license": "Apache-2.0", "engines": { "node": "^16.14.0 || >=18.0.0" @@ -3512,7 +3510,6 @@ "version": "2.3.2", "resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-2.3.2.tgz", "integrity": "sha512-5Vz5dPVuunIIvC5vBb0APwo7qKA4G9yM48kPWJT+OEERs40md5GoUR1yedwpekWZ4m0Hhw44m6zU+ObsON+iDA==", - "inBundle": true, "license": "Apache-2.0", "dependencies": { "@sigstore/bundle": "^2.3.2", @@ -3530,7 +3527,6 @@ "version": "2.2.2", "resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-2.2.2.tgz", "integrity": "sha512-OrcNPXdpSl9UX7qPVRWbmWMCSXrcDa2M9DvrbOTj7ao1S4PlqVFYv9/yLKMkrJKZ/V5A/kDBC690or307i26Og==", - "inBundle": true, "license": "ISC", "dependencies": { "agent-base": "^7.1.0", @@ -3547,7 +3543,6 @@ "version": "3.1.1", "resolved": 
"https://registry.npmjs.org/@npmcli/fs/-/fs-3.1.1.tgz", "integrity": "sha512-q9CRWjpHCMIh5sVyefoD1cA7PkvILqCZsnSOEUUivORLjxCO/Irmue2DprETiNgEqktDBZaM1Bi+jrarx1XdCg==", - "inBundle": true, "license": "ISC", "dependencies": { "semver": "^7.3.5" @@ -3560,7 +3555,6 @@ "version": "18.0.4", "resolved": "https://registry.npmjs.org/cacache/-/cacache-18.0.4.tgz", "integrity": "sha512-B+L5iIa9mgcjLbliir2th36yEwPftrzteHYujzsx3dFP/31GCHcIeS8f5MGd80odLOjaOvSpU3EEAmRQptkxLQ==", - "inBundle": true, "license": "ISC", "dependencies": { "@npmcli/fs": "^3.1.0", @@ -3584,7 +3578,6 @@ "version": "13.0.1", "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-13.0.1.tgz", "integrity": "sha512-cKTUFc/rbKUd/9meOvgrpJ2WrNzymt6jfRDdwg5UCnVzv9dTpEj9JS5m3wtziXVCjluIXyL8pcaukYqezIzZQA==", - "inBundle": true, "license": "ISC", "dependencies": { "@npmcli/agent": "^2.0.0", @@ -3608,7 +3601,6 @@ "version": "3.0.5", "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-3.0.5.tgz", "integrity": "sha512-2N8elDQAtSnFV0Dk7gt15KHsS0Fyz6CbYZ360h0WTYV1Ty46li3rAXVOQj1THMNLdmrD9Vt5pBPtWtVkpwGBqg==", - "inBundle": true, "license": "MIT", "dependencies": { "minipass": "^7.0.3", @@ -3626,7 +3618,6 @@ "version": "4.2.0", "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz", "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==", - "inBundle": true, "license": "ISC", "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" @@ -3636,7 +3627,6 @@ "version": "10.0.6", "resolved": "https://registry.npmjs.org/ssri/-/ssri-10.0.6.tgz", "integrity": "sha512-MGrFH9Z4NP9Iyhqn16sDtBpRRNJ0Y2hNa6D65h736fVSaPCHr4DM4sWUNvVaSuC+0OBGhwsrydQwmgfg5LncqQ==", - "inBundle": true, "license": "ISC", "dependencies": { "minipass": "^7.0.3" @@ -3649,7 +3639,6 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-3.0.0.tgz", "integrity": "sha512-afXhuC55wkAmZ0P18QsVE6kp8JaxrEokN2HGIoIVv2ijHQd419H0+6EigAFcIzXeMIkcIkNBpB3L/DXB3cTS/g==", - "inBundle": true, "license": "ISC", "dependencies": { "unique-slug": "^4.0.0" @@ -3662,7 +3651,6 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-4.0.0.tgz", "integrity": "sha512-WrcA6AyEfqDX5bWige/4NQfPZMtASNVxdmWR76WESYQVAACSgWcR6e9i0mofqqBxYFtL4oAxPIptY73/0YE1DQ==", - "inBundle": true, "license": "ISC", "dependencies": { "imurmurhash": "^0.1.4" @@ -3689,7 +3677,6 @@ "version": "1.2.1", "resolved": "https://registry.npmjs.org/@sigstore/verify/-/verify-1.2.1.tgz", "integrity": "sha512-8iKx79/F73DKbGfRf7+t4dqrc0bRr0thdPrxAtCKWRm/F0tG71i6O1rvlnScncJLLBZHn3h8M3c1BSUAb9yu8g==", - "inBundle": true, "license": "Apache-2.0", "dependencies": { "@sigstore/bundle": "^2.3.2", @@ -12092,9 +12079,9 @@ "license": "BlueOak-1.0.0" }, "node_modules/pacote": { - "version": "19.0.0", - "resolved": "https://registry.npmjs.org/pacote/-/pacote-19.0.0.tgz", - "integrity": "sha512-953pUJqILTeaRvKFcQ78unsNc3Nl4PyVHTTsAUmvSmJ0NXs0LTWKAl5tMF2CXPRXA16RdCMYI9EKlV4CCi2T5g==", + "version": "19.0.1", + "resolved": "https://registry.npmjs.org/pacote/-/pacote-19.0.1.tgz", + "integrity": "sha512-zIpxWAsr/BvhrkSruspG8aqCQUUrWtpwx0GjiRZQhEM/pZXrigA32ElN3vTcCPUDOFmHr6SFxwYrvVUs5NTEUg==", "inBundle": true, "license": "ISC", "dependencies": { @@ -12112,7 +12099,7 @@ "npm-registry-fetch": "^18.0.0", "proc-log": "^5.0.0", "promise-retry": "^2.0.1", - "sigstore": "^2.2.0", + "sigstore": "^3.0.0", "ssri": "^12.0.0", "tar": "^6.1.11" }, @@ -12123,6 +12110,123 @@ "node": "^18.17.0 
|| >=20.5.0" } }, + "node_modules/pacote/node_modules/@sigstore/bundle": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-3.0.0.tgz", + "integrity": "sha512-XDUYX56iMPAn/cdgh/DTJxz5RWmqKV4pwvUAEKEWJl+HzKdCd/24wUa9JYNMlDSCb7SUHAdtksxYX779Nne/Zg==", + "inBundle": true, + "license": "Apache-2.0", + "dependencies": { + "@sigstore/protobuf-specs": "^0.3.2" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/pacote/node_modules/@sigstore/core": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@sigstore/core/-/core-2.0.0.tgz", + "integrity": "sha512-nYxaSb/MtlSI+JWcwTHQxyNmWeWrUXJJ/G4liLrGG7+tS4vAz6LF3xRXqLH6wPIVUoZQel2Fs4ddLx4NCpiIYg==", + "inBundle": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/pacote/node_modules/@sigstore/sign": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-3.0.0.tgz", + "integrity": "sha512-UjhDMQOkyDoktpXoc5YPJpJK6IooF2gayAr5LvXI4EL7O0vd58okgfRcxuaH+YTdhvb5aa1Q9f+WJ0c2sVuYIw==", + "inBundle": true, + "license": "Apache-2.0", + "dependencies": { + "@sigstore/bundle": "^3.0.0", + "@sigstore/core": "^2.0.0", + "@sigstore/protobuf-specs": "^0.3.2", + "make-fetch-happen": "^14.0.1", + "proc-log": "^5.0.0", + "promise-retry": "^2.0.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/pacote/node_modules/@sigstore/tuf": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-3.0.0.tgz", + "integrity": "sha512-9Xxy/8U5OFJu7s+OsHzI96IX/OzjF/zj0BSSaWhgJgTqtlBhQIV2xdrQI5qxLD7+CWWDepadnXAxzaZ3u9cvRw==", + "inBundle": true, + "license": "Apache-2.0", + "dependencies": { + "@sigstore/protobuf-specs": "^0.3.2", + "tuf-js": "^3.0.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/pacote/node_modules/@sigstore/verify": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@sigstore/verify/-/verify-2.0.0.tgz", + "integrity": "sha512-Ggtq2GsJuxFNUvQzLoXqRwS4ceRfLAJnrIHUDrzAD0GgnOhwujJkKkxM/s5Bako07c3WtAs/sZo5PJq7VHjeDg==", + "inBundle": true, + "license": "Apache-2.0", + "dependencies": { + "@sigstore/bundle": "^3.0.0", + "@sigstore/core": "^2.0.0", + "@sigstore/protobuf-specs": "^0.3.2" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/pacote/node_modules/@tufjs/models": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-3.0.1.tgz", + "integrity": "sha512-UUYHISyhCU3ZgN8yaear3cGATHb3SMuKHsQ/nVbHXcmnBf+LzQ/cQfhNG+rfaSHgqGKNEm2cOCLVLELStUQ1JA==", + "inBundle": true, + "license": "MIT", + "dependencies": { + "@tufjs/canonical-json": "2.0.0", + "minimatch": "^9.0.5" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/pacote/node_modules/sigstore": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-3.0.0.tgz", + "integrity": "sha512-PHMifhh3EN4loMcHCz6l3v/luzgT3za+9f8subGgeMNjbJjzH4Ij/YoX3Gvu+kaouJRIlVdTHHCREADYf+ZteA==", + "inBundle": true, + "license": "Apache-2.0", + "dependencies": { + "@sigstore/bundle": "^3.0.0", + "@sigstore/core": "^2.0.0", + "@sigstore/protobuf-specs": "^0.3.2", + "@sigstore/sign": "^3.0.0", + "@sigstore/tuf": "^3.0.0", + "@sigstore/verify": "^2.0.0" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/pacote/node_modules/tuf-js": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-3.0.1.tgz", + "integrity": 
"sha512-+68OP1ZzSF84rTckf3FA95vJ1Zlx/uaXyiiKyPd1pA4rZNkpEvDAKmsu1xUSmbF/chCRYgZ6UZkDwC7PmzmAyA==", + "inBundle": true, + "license": "MIT", + "dependencies": { + "@tufjs/models": "3.0.1", + "debug": "^4.3.6", + "make-fetch-happen": "^14.0.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, "node_modules/parent-module": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", @@ -13569,7 +13673,6 @@ "version": "2.3.1", "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-2.3.1.tgz", "integrity": "sha512-8G+/XDU8wNsJOQS5ysDVO0Etg9/2uA5gR9l4ZwijjlwxBcrU6RPfwi2+jJmbP+Ap1Hlp/nVAaEO4Fj22/SL2gQ==", - "inBundle": true, "license": "Apache-2.0", "dependencies": { "@sigstore/bundle": "^2.3.2", diff --git a/package.json b/package.json index 42484fce46f5a..c08c442655654 100644 --- a/package.json +++ b/package.json @@ -103,7 +103,7 @@ "npm-registry-fetch": "^18.0.2", "npm-user-validate": "^3.0.0", "p-map": "^4.0.0", - "pacote": "^19.0.0", + "pacote": "^19.0.1", "parse-conflict-json": "^4.0.0", "proc-log": "^5.0.0", "qrcode-terminal": "^0.12.0", From 0dedd7367bee845dbc46281ee09d3ac401005f86 Mon Sep 17 00:00:00 2001 From: Gar Date: Wed, 20 Nov 2024 09:10:31 -0800 Subject: [PATCH 06/16] deps: ansi-regex@6.1.0 --- .../@isaacs/cliui/node_modules/ansi-regex/index.js | 6 ++++-- .../cliui/node_modules/ansi-regex/package.json | 9 ++++++--- .../wrap-ansi/node_modules/ansi-regex/index.js | 6 ++++-- .../wrap-ansi/node_modules/ansi-regex/package.json | 9 ++++++--- package-lock.json | 12 ++++++------ 5 files changed, 26 insertions(+), 16 deletions(-) diff --git a/node_modules/@isaacs/cliui/node_modules/ansi-regex/index.js b/node_modules/@isaacs/cliui/node_modules/ansi-regex/index.js index 130a0929b8ce8..ddfdba39a783a 100644 --- a/node_modules/@isaacs/cliui/node_modules/ansi-regex/index.js +++ b/node_modules/@isaacs/cliui/node_modules/ansi-regex/index.js @@ -1,7 +1,9 @@ export default function ansiRegex({onlyFirst = false} = {}) { + // Valid string terminator sequences are BEL, ESC\, and 0x9c + const ST = '(?:\\u0007|\\u001B\\u005C|\\u009C)'; const pattern = [ - '[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)', - '(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))' + `[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?${ST})`, + '(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-nq-uy=><~]))', ].join('|'); return new RegExp(pattern, onlyFirst ? 
undefined : 'g'); diff --git a/node_modules/@isaacs/cliui/node_modules/ansi-regex/package.json b/node_modules/@isaacs/cliui/node_modules/ansi-regex/package.json index 7bbb563bf2a70..49f3f61021512 100644 --- a/node_modules/@isaacs/cliui/node_modules/ansi-regex/package.json +++ b/node_modules/@isaacs/cliui/node_modules/ansi-regex/package.json @@ -1,6 +1,6 @@ { "name": "ansi-regex", - "version": "6.0.1", + "version": "6.1.0", "description": "Regular expression for matching ANSI escape codes", "license": "MIT", "repository": "chalk/ansi-regex", @@ -12,6 +12,8 @@ }, "type": "module", "exports": "./index.js", + "types": "./index.d.ts", + "sideEffects": false, "engines": { "node": ">=12" }, @@ -51,8 +53,9 @@ "pattern" ], "devDependencies": { + "ansi-escapes": "^5.0.0", "ava": "^3.15.0", - "tsd": "^0.14.0", - "xo": "^0.38.2" + "tsd": "^0.21.0", + "xo": "^0.54.2" } } diff --git a/node_modules/wrap-ansi/node_modules/ansi-regex/index.js b/node_modules/wrap-ansi/node_modules/ansi-regex/index.js index 130a0929b8ce8..ddfdba39a783a 100644 --- a/node_modules/wrap-ansi/node_modules/ansi-regex/index.js +++ b/node_modules/wrap-ansi/node_modules/ansi-regex/index.js @@ -1,7 +1,9 @@ export default function ansiRegex({onlyFirst = false} = {}) { + // Valid string terminator sequences are BEL, ESC\, and 0x9c + const ST = '(?:\\u0007|\\u001B\\u005C|\\u009C)'; const pattern = [ - '[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)', - '(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))' + `[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?${ST})`, + '(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-nq-uy=><~]))', ].join('|'); return new RegExp(pattern, onlyFirst ? 
undefined : 'g'); diff --git a/node_modules/wrap-ansi/node_modules/ansi-regex/package.json b/node_modules/wrap-ansi/node_modules/ansi-regex/package.json index 7bbb563bf2a70..49f3f61021512 100644 --- a/node_modules/wrap-ansi/node_modules/ansi-regex/package.json +++ b/node_modules/wrap-ansi/node_modules/ansi-regex/package.json @@ -1,6 +1,6 @@ { "name": "ansi-regex", - "version": "6.0.1", + "version": "6.1.0", "description": "Regular expression for matching ANSI escape codes", "license": "MIT", "repository": "chalk/ansi-regex", @@ -12,6 +12,8 @@ }, "type": "module", "exports": "./index.js", + "types": "./index.d.ts", + "sideEffects": false, "engines": { "node": ">=12" }, @@ -51,8 +53,9 @@ "pattern" ], "devDependencies": { + "ansi-escapes": "^5.0.0", "ava": "^3.15.0", - "tsd": "^0.14.0", - "xo": "^0.38.2" + "tsd": "^0.21.0", + "xo": "^0.54.2" } } diff --git a/package-lock.json b/package-lock.json index 91998e6447da7..a72ece9382ccd 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1983,9 +1983,9 @@ } }, "node_modules/@isaacs/cliui/node_modules/ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", + "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", "inBundle": true, "license": "MIT", "engines": { @@ -17731,9 +17731,9 @@ } }, "node_modules/wrap-ansi/node_modules/ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", + "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", "inBundle": true, "license": "MIT", "engines": { From 7c7870942432f87eddb70c668c907c6d885fc4d4 Mon Sep 17 00:00:00 2001 From: Gar Date: Wed, 20 Nov 2024 09:11:05 -0800 Subject: [PATCH 07/16] deps: @npmcli/metavuln-calculator@8.0.1 --- node_modules/.gitignore | 15 + .../node_modules/@sigstore/bundle/LICENSE | 202 +++++++ .../@sigstore/bundle/dist/build.js | 100 ++++ .../@sigstore/bundle/dist/bundle.js | 24 + .../@sigstore/bundle/dist/error.js | 25 + .../@sigstore/bundle/dist/index.js | 43 ++ .../@sigstore/bundle/dist/serialized.js | 49 ++ .../@sigstore/bundle/dist/utility.js | 2 + .../@sigstore/bundle/dist/validate.js | 199 +++++++ .../@sigstore/bundle/package.json | 35 ++ .../node_modules/@sigstore/core/LICENSE | 202 +++++++ .../@sigstore/core/dist/asn1/error.js | 24 + .../@sigstore/core/dist/asn1/index.js | 20 + .../@sigstore/core/dist/asn1/length.js | 62 +++ .../@sigstore/core/dist/asn1/obj.js | 152 ++++++ .../@sigstore/core/dist/asn1/parse.js | 124 +++++ .../@sigstore/core/dist/asn1/tag.js | 86 +++ .../@sigstore/core/dist/crypto.js | 60 +++ .../node_modules/@sigstore/core/dist/dsse.js | 30 ++ .../@sigstore/core/dist/encoding.js | 27 + .../node_modules/@sigstore/core/dist/index.js | 56 ++ .../node_modules/@sigstore/core/dist/json.js | 60 +++ .../node_modules/@sigstore/core/dist/oid.js | 14 + .../node_modules/@sigstore/core/dist/pem.js | 43 ++ .../@sigstore/core/dist/rfc3161/error.js | 21 + .../@sigstore/core/dist/rfc3161/index.js | 20 + 
.../@sigstore/core/dist/rfc3161/timestamp.js | 201 +++++++ .../@sigstore/core/dist/rfc3161/tstinfo.js | 61 +++ .../@sigstore/core/dist/stream.js | 115 ++++ .../@sigstore/core/dist/x509/cert.js | 230 ++++++++ .../@sigstore/core/dist/x509/ext.js | 145 +++++ .../@sigstore/core/dist/x509/index.js | 23 + .../@sigstore/core/dist/x509/sct.js | 141 +++++ .../node_modules/@sigstore/core/package.json | 31 ++ .../node_modules/@sigstore/sign/LICENSE | 202 +++++++ .../@sigstore/sign/dist/bundler/base.js | 50 ++ .../@sigstore/sign/dist/bundler/bundle.js | 71 +++ .../@sigstore/sign/dist/bundler/dsse.js | 46 ++ .../@sigstore/sign/dist/bundler/index.js | 7 + .../@sigstore/sign/dist/bundler/message.js | 30 ++ .../node_modules/@sigstore/sign/dist/error.js | 39 ++ .../@sigstore/sign/dist/external/error.js | 26 + .../@sigstore/sign/dist/external/fetch.js | 98 ++++ .../@sigstore/sign/dist/external/fulcio.js | 41 ++ .../@sigstore/sign/dist/external/rekor.js | 80 +++ .../@sigstore/sign/dist/external/tsa.js | 38 ++ .../@sigstore/sign/dist/identity/ci.js | 73 +++ .../@sigstore/sign/dist/identity/index.js | 20 + .../@sigstore/sign/dist/identity/provider.js | 2 + .../node_modules/@sigstore/sign/dist/index.js | 17 + .../@sigstore/sign/dist/signer/fulcio/ca.js | 59 +++ .../sign/dist/signer/fulcio/ephemeral.js | 45 ++ .../sign/dist/signer/fulcio/index.js | 87 +++ .../@sigstore/sign/dist/signer/index.js | 22 + .../@sigstore/sign/dist/signer/signer.js | 17 + .../@sigstore/sign/dist/types/fetch.js | 2 + .../@sigstore/sign/dist/util/index.js | 49 ++ .../@sigstore/sign/dist/util/oidc.js | 30 ++ .../@sigstore/sign/dist/util/ua.js | 32 ++ .../@sigstore/sign/dist/witness/index.js | 24 + .../sign/dist/witness/tlog/client.js | 61 +++ .../@sigstore/sign/dist/witness/tlog/entry.js | 140 +++++ .../@sigstore/sign/dist/witness/tlog/index.js | 82 +++ .../@sigstore/sign/dist/witness/tsa/client.js | 46 ++ .../@sigstore/sign/dist/witness/tsa/index.js | 44 ++ .../@sigstore/sign/dist/witness/witness.js | 2 + .../node_modules/@sigstore/sign/package.json | 46 ++ .../node_modules/@sigstore/tuf/LICENSE | 202 +++++++ .../@sigstore/tuf/dist/appdata.js | 43 ++ .../node_modules/@sigstore/tuf/dist/client.js | 111 ++++ .../node_modules/@sigstore/tuf/dist/error.js | 12 + .../node_modules/@sigstore/tuf/dist/index.js | 56 ++ .../node_modules/@sigstore/tuf/dist/target.js | 79 +++ .../node_modules/@sigstore/tuf/package.json | 41 ++ .../node_modules/@sigstore/tuf/seeds.json | 1 + .../@sigstore/verify/dist/bundle/dsse.js | 43 ++ .../@sigstore/verify/dist/bundle/index.js | 57 ++ .../@sigstore/verify/dist/bundle/message.js | 36 ++ .../@sigstore/verify/dist/error.js | 32 ++ .../@sigstore/verify/dist/index.js | 28 + .../@sigstore/verify/dist/key/certificate.js | 205 ++++++++ .../@sigstore/verify/dist/key/index.js | 72 +++ .../@sigstore/verify/dist/key/sct.js | 78 +++ .../@sigstore/verify/dist/policy.js | 24 + .../@sigstore/verify/dist/shared.types.js | 2 + .../verify/dist/timestamp/checkpoint.js | 157 ++++++ .../@sigstore/verify/dist/timestamp/index.js | 46 ++ .../@sigstore/verify/dist/timestamp/merkle.js | 104 ++++ .../@sigstore/verify/dist/timestamp/set.js | 60 +++ .../@sigstore/verify/dist/timestamp/tsa.js | 73 +++ .../@sigstore/verify/dist/tlog/dsse.js | 57 ++ .../verify/dist/tlog/hashedrekord.js | 51 ++ .../@sigstore/verify/dist/tlog/index.js | 47 ++ .../@sigstore/verify/dist/tlog/intoto.js | 62 +++ .../@sigstore/verify/dist/trust/filter.js | 23 + .../@sigstore/verify/dist/trust/index.js | 86 +++ .../verify/dist/trust/trust.types.js | 2 + 
.../@sigstore/verify/dist/verifier.js | 141 +++++ .../@sigstore/verify/package.json | 36 ++ .../node_modules/@tufjs/models/LICENSE | 21 + .../node_modules/@tufjs/models/dist/base.js | 92 ++++ .../@tufjs/models/dist/delegations.js | 115 ++++ .../node_modules/@tufjs/models/dist/error.js | 27 + .../node_modules/@tufjs/models/dist/file.js | 183 +++++++ .../node_modules/@tufjs/models/dist/index.js | 24 + .../node_modules/@tufjs/models/dist/key.js | 85 +++ .../@tufjs/models/dist/metadata.js | 160 ++++++ .../node_modules/@tufjs/models/dist/role.js | 299 +++++++++++ .../node_modules/@tufjs/models/dist/root.js | 116 ++++ .../@tufjs/models/dist/signature.js | 38 ++ .../@tufjs/models/dist/snapshot.js | 71 +++ .../@tufjs/models/dist/targets.js | 92 ++++ .../@tufjs/models/dist/timestamp.js | 58 ++ .../@tufjs/models/dist/utils/guard.js | 32 ++ .../@tufjs/models/dist/utils/index.js | 28 + .../@tufjs/models/dist/utils/key.js | 142 +++++ .../@tufjs/models/dist/utils/oid.js | 26 + .../@tufjs/models/dist/utils/types.js | 2 + .../@tufjs/models/dist/utils/verify.js | 13 + .../node_modules/@tufjs/models/package.json | 37 ++ .../node_modules/pacote/LICENSE | 15 + .../node_modules/pacote/bin/index.js | 158 ++++++ .../node_modules/pacote/lib/dir.js | 105 ++++ .../node_modules/pacote/lib/fetcher.js | 497 ++++++++++++++++++ .../node_modules/pacote/lib/file.js | 94 ++++ .../node_modules/pacote/lib/git.js | 317 +++++++++++ .../node_modules/pacote/lib/index.js | 23 + .../node_modules/pacote/lib/registry.js | 369 +++++++++++++ .../node_modules/pacote/lib/remote.js | 89 ++++ .../pacote/lib/util/add-git-sha.js | 15 + .../node_modules/pacote/lib/util/cache-dir.js | 15 + .../pacote/lib/util/is-package-bin.js | 25 + .../node_modules/pacote/lib/util/npm.js | 14 + .../node_modules/pacote/lib/util/protected.js | 5 + .../pacote/lib/util/tar-create-options.js | 31 ++ .../pacote/lib/util/trailing-slashes.js | 10 + .../node_modules/pacote/package.json | 79 +++ .../node_modules/sigstore/LICENSE | 202 +++++++ .../node_modules/sigstore/dist/config.js | 120 +++++ .../node_modules/sigstore/dist/index.js | 34 ++ .../node_modules/sigstore/dist/sigstore.js | 102 ++++ .../node_modules/sigstore/package.json | 47 ++ .../node_modules/tuf-js/LICENSE | 21 + .../node_modules/tuf-js/dist/config.js | 15 + .../node_modules/tuf-js/dist/error.js | 48 ++ .../node_modules/tuf-js/dist/fetcher.js | 84 +++ .../node_modules/tuf-js/dist/index.js | 9 + .../node_modules/tuf-js/dist/store.js | 208 ++++++++ .../node_modules/tuf-js/dist/updater.js | 350 ++++++++++++ .../node_modules/tuf-js/dist/utils/tmpfile.js | 25 + .../node_modules/tuf-js/dist/utils/url.js | 13 + .../node_modules/tuf-js/package.json | 43 ++ .../@npmcli/metavuln-calculator/package.json | 4 +- package-lock.json | 148 +++++- 154 files changed, 11326 insertions(+), 6 deletions(-) create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/LICENSE create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/build.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/bundle.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/error.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/serialized.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/utility.js 
create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/validate.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/package.json create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/LICENSE create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/error.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/length.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/obj.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/parse.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/tag.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/crypto.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/dsse.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/encoding.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/json.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/oid.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/pem.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/rfc3161/error.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/rfc3161/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/rfc3161/timestamp.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/stream.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/x509/cert.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/x509/ext.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/x509/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/x509/sct.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/package.json create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/LICENSE create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/bundler/base.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/bundler/bundle.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/bundler/dsse.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/bundler/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/bundler/message.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/error.js create mode 100644 
node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/external/error.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/external/fetch.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/external/fulcio.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/external/rekor.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/external/tsa.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/identity/ci.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/identity/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/identity/provider.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/signer/fulcio/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/signer/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/signer/signer.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/types/fetch.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/util/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/util/oidc.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/util/ua.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/tlog/client.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/tlog/entry.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/tlog/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/tsa/client.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/tsa/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/witness.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/package.json create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/LICENSE create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/appdata.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/client.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/error.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/target.js create mode 100644 
node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/package.json create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/seeds.json create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/bundle/dsse.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/bundle/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/bundle/message.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/error.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/key/certificate.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/key/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/key/sct.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/policy.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/shared.types.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/timestamp/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/timestamp/merkle.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/timestamp/set.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/timestamp/tsa.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/tlog/dsse.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/tlog/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/tlog/intoto.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/trust/filter.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/trust/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/trust/trust.types.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/verifier.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/package.json create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/LICENSE create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/base.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/delegations.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/error.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/file.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/key.js create mode 100644 
node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/metadata.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/role.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/root.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/signature.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/snapshot.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/targets.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/timestamp.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/guard.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/key.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/oid.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/types.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/verify.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/package.json create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/LICENSE create mode 100755 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/bin/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/dir.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/fetcher.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/file.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/git.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/registry.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/remote.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/add-git-sha.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/cache-dir.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/is-package-bin.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/npm.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/protected.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/tar-create-options.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/trailing-slashes.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/package.json create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/sigstore/LICENSE create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/sigstore/dist/config.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/sigstore/dist/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/sigstore/dist/sigstore.js create mode 100644 
node_modules/@npmcli/metavuln-calculator/node_modules/sigstore/package.json create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/LICENSE create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/config.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/error.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/fetcher.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/store.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/updater.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/utils/tmpfile.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/utils/url.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/package.json diff --git a/node_modules/.gitignore b/node_modules/.gitignore index ca0c5fbd69ae3..1a0706de3b8f3 100644 --- a/node_modules/.gitignore +++ b/node_modules/.gitignore @@ -22,6 +22,21 @@ !/@npmcli/installed-package-contents !/@npmcli/map-workspaces !/@npmcli/metavuln-calculator +!/@npmcli/metavuln-calculator/node_modules/ +/@npmcli/metavuln-calculator/node_modules/* +!/@npmcli/metavuln-calculator/node_modules/@sigstore/ +/@npmcli/metavuln-calculator/node_modules/@sigstore/* +!/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle +!/@npmcli/metavuln-calculator/node_modules/@sigstore/core +!/@npmcli/metavuln-calculator/node_modules/@sigstore/sign +!/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf +!/@npmcli/metavuln-calculator/node_modules/@sigstore/verify +!/@npmcli/metavuln-calculator/node_modules/@tufjs/ +/@npmcli/metavuln-calculator/node_modules/@tufjs/* +!/@npmcli/metavuln-calculator/node_modules/@tufjs/models +!/@npmcli/metavuln-calculator/node_modules/pacote +!/@npmcli/metavuln-calculator/node_modules/sigstore +!/@npmcli/metavuln-calculator/node_modules/tuf-js !/@npmcli/name-from-folder !/@npmcli/node-gyp !/@npmcli/package-json diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/LICENSE new file mode 100644 index 0000000000000..e9e7c1679a09d --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 The Sigstore Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/build.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/build.js new file mode 100644 index 0000000000000..ade736407554c --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/build.js @@ -0,0 +1,100 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toMessageSignatureBundle = toMessageSignatureBundle; +exports.toDSSEBundle = toDSSEBundle; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const protobuf_specs_1 = require("@sigstore/protobuf-specs"); +const bundle_1 = require("./bundle"); +// Message signature bundle - $case: 'messageSignature' +function toMessageSignatureBundle(options) { + return { + mediaType: options.certificateChain + ? bundle_1.BUNDLE_V02_MEDIA_TYPE + : bundle_1.BUNDLE_V03_MEDIA_TYPE, + content: { + $case: 'messageSignature', + messageSignature: { + messageDigest: { + algorithm: protobuf_specs_1.HashAlgorithm.SHA2_256, + digest: options.digest, + }, + signature: options.signature, + }, + }, + verificationMaterial: toVerificationMaterial(options), + }; +} +// DSSE envelope bundle - $case: 'dsseEnvelope' +function toDSSEBundle(options) { + return { + mediaType: options.certificateChain + ? 
bundle_1.BUNDLE_V02_MEDIA_TYPE + : bundle_1.BUNDLE_V03_MEDIA_TYPE, + content: { + $case: 'dsseEnvelope', + dsseEnvelope: toEnvelope(options), + }, + verificationMaterial: toVerificationMaterial(options), + }; +} +function toEnvelope(options) { + return { + payloadType: options.artifactType, + payload: options.artifact, + signatures: [toSignature(options)], + }; +} +function toSignature(options) { + return { + keyid: options.keyHint || '', + sig: options.signature, + }; +} +// Verification material +function toVerificationMaterial(options) { + return { + content: toKeyContent(options), + tlogEntries: [], + timestampVerificationData: { rfc3161Timestamps: [] }, + }; +} +function toKeyContent(options) { + if (options.certificate) { + if (options.certificateChain) { + return { + $case: 'x509CertificateChain', + x509CertificateChain: { + certificates: [{ rawBytes: options.certificate }], + }, + }; + } + else { + return { + $case: 'certificate', + certificate: { rawBytes: options.certificate }, + }; + } + } + else { + return { + $case: 'publicKey', + publicKey: { + hint: options.keyHint || '', + }, + }; + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/bundle.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/bundle.js new file mode 100644 index 0000000000000..eb67a0ddc17bb --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/bundle.js @@ -0,0 +1,24 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.BUNDLE_V03_MEDIA_TYPE = exports.BUNDLE_V03_LEGACY_MEDIA_TYPE = exports.BUNDLE_V02_MEDIA_TYPE = exports.BUNDLE_V01_MEDIA_TYPE = void 0; +exports.isBundleWithCertificateChain = isBundleWithCertificateChain; +exports.isBundleWithPublicKey = isBundleWithPublicKey; +exports.isBundleWithMessageSignature = isBundleWithMessageSignature; +exports.isBundleWithDsseEnvelope = isBundleWithDsseEnvelope; +exports.BUNDLE_V01_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.1'; +exports.BUNDLE_V02_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.2'; +exports.BUNDLE_V03_LEGACY_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.3'; +exports.BUNDLE_V03_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle.v0.3+json'; +// Type guards for bundle variants. +function isBundleWithCertificateChain(b) { + return b.verificationMaterial.content.$case === 'x509CertificateChain'; +} +function isBundleWithPublicKey(b) { + return b.verificationMaterial.content.$case === 'publicKey'; +} +function isBundleWithMessageSignature(b) { + return b.content.$case === 'messageSignature'; +} +function isBundleWithDsseEnvelope(b) { + return b.content.$case === 'dsseEnvelope'; +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/error.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/error.js new file mode 100644 index 0000000000000..f84295323b812 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/error.js @@ -0,0 +1,25 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ValidationError = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +class ValidationError extends Error { + constructor(message, fields) { + super(message); + this.fields = fields; + } +} +exports.ValidationError = ValidationError; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/index.js new file mode 100644 index 0000000000000..1b012acad4d85 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/index.js @@ -0,0 +1,43 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isBundleV01 = exports.assertBundleV02 = exports.assertBundleV01 = exports.assertBundleLatest = exports.assertBundle = exports.envelopeToJSON = exports.envelopeFromJSON = exports.bundleToJSON = exports.bundleFromJSON = exports.ValidationError = exports.isBundleWithPublicKey = exports.isBundleWithMessageSignature = exports.isBundleWithDsseEnvelope = exports.isBundleWithCertificateChain = exports.BUNDLE_V03_MEDIA_TYPE = exports.BUNDLE_V03_LEGACY_MEDIA_TYPE = exports.BUNDLE_V02_MEDIA_TYPE = exports.BUNDLE_V01_MEDIA_TYPE = exports.toMessageSignatureBundle = exports.toDSSEBundle = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +var build_1 = require("./build"); +Object.defineProperty(exports, "toDSSEBundle", { enumerable: true, get: function () { return build_1.toDSSEBundle; } }); +Object.defineProperty(exports, "toMessageSignatureBundle", { enumerable: true, get: function () { return build_1.toMessageSignatureBundle; } }); +var bundle_1 = require("./bundle"); +Object.defineProperty(exports, "BUNDLE_V01_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V01_MEDIA_TYPE; } }); +Object.defineProperty(exports, "BUNDLE_V02_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V02_MEDIA_TYPE; } }); +Object.defineProperty(exports, "BUNDLE_V03_LEGACY_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V03_LEGACY_MEDIA_TYPE; } }); +Object.defineProperty(exports, "BUNDLE_V03_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V03_MEDIA_TYPE; } }); +Object.defineProperty(exports, "isBundleWithCertificateChain", { enumerable: true, get: function () { return bundle_1.isBundleWithCertificateChain; } }); +Object.defineProperty(exports, "isBundleWithDsseEnvelope", { enumerable: true, get: function () { return bundle_1.isBundleWithDsseEnvelope; } }); +Object.defineProperty(exports, "isBundleWithMessageSignature", { enumerable: true, get: function () { return bundle_1.isBundleWithMessageSignature; } }); +Object.defineProperty(exports, "isBundleWithPublicKey", { enumerable: true, get: function () { return bundle_1.isBundleWithPublicKey; } }); +var error_1 = require("./error"); +Object.defineProperty(exports, "ValidationError", { enumerable: true, get: function () { return error_1.ValidationError; } }); +var serialized_1 = require("./serialized"); +Object.defineProperty(exports, "bundleFromJSON", { enumerable: true, get: function () { return serialized_1.bundleFromJSON; } }); +Object.defineProperty(exports, "bundleToJSON", { enumerable: true, get: function () { return serialized_1.bundleToJSON; } }); +Object.defineProperty(exports, "envelopeFromJSON", { enumerable: true, get: function () { return serialized_1.envelopeFromJSON; } }); +Object.defineProperty(exports, "envelopeToJSON", { enumerable: true, get: function () { return serialized_1.envelopeToJSON; } }); +var validate_1 = require("./validate"); +Object.defineProperty(exports, "assertBundle", { enumerable: true, get: function () { return validate_1.assertBundle; } }); +Object.defineProperty(exports, "assertBundleLatest", { enumerable: true, get: function () { return validate_1.assertBundleLatest; } }); +Object.defineProperty(exports, "assertBundleV01", { enumerable: true, get: function () { return validate_1.assertBundleV01; } }); +Object.defineProperty(exports, "assertBundleV02", { enumerable: true, get: function () { return validate_1.assertBundleV02; } }); +Object.defineProperty(exports, "isBundleV01", { enumerable: true, get: function () { return validate_1.isBundleV01; } }); diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/serialized.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/serialized.js new file mode 100644 index 0000000000000..be0d2a2d54d09 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/serialized.js @@ -0,0 +1,49 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.envelopeToJSON = exports.envelopeFromJSON = exports.bundleToJSON = exports.bundleFromJSON = void 0; +/* +Copyright 2023 The Sigstore Authors. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const protobuf_specs_1 = require("@sigstore/protobuf-specs"); +const bundle_1 = require("./bundle"); +const validate_1 = require("./validate"); +const bundleFromJSON = (obj) => { + const bundle = protobuf_specs_1.Bundle.fromJSON(obj); + switch (bundle.mediaType) { + case bundle_1.BUNDLE_V01_MEDIA_TYPE: + (0, validate_1.assertBundleV01)(bundle); + break; + case bundle_1.BUNDLE_V02_MEDIA_TYPE: + (0, validate_1.assertBundleV02)(bundle); + break; + default: + (0, validate_1.assertBundleLatest)(bundle); + break; + } + return bundle; +}; +exports.bundleFromJSON = bundleFromJSON; +const bundleToJSON = (bundle) => { + return protobuf_specs_1.Bundle.toJSON(bundle); +}; +exports.bundleToJSON = bundleToJSON; +const envelopeFromJSON = (obj) => { + return protobuf_specs_1.Envelope.fromJSON(obj); +}; +exports.envelopeFromJSON = envelopeFromJSON; +const envelopeToJSON = (envelope) => { + return protobuf_specs_1.Envelope.toJSON(envelope); +}; +exports.envelopeToJSON = envelopeToJSON; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/utility.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/utility.js new file mode 100644 index 0000000000000..c8ad2e549bdc6 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/utility.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/validate.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/validate.js new file mode 100644 index 0000000000000..21b8b5ee293ba --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/validate.js @@ -0,0 +1,199 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.assertBundle = assertBundle; +exports.assertBundleV01 = assertBundleV01; +exports.isBundleV01 = isBundleV01; +exports.assertBundleV02 = assertBundleV02; +exports.assertBundleLatest = assertBundleLatest; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const error_1 = require("./error"); +// Performs basic validation of a Sigstore bundle to ensure that all required +// fields are populated. This is not a complete validation of the bundle, but +// rather a check that the bundle is in a valid state to be processed by the +// rest of the code. 
+function assertBundle(b) { + const invalidValues = validateBundleBase(b); + if (invalidValues.length > 0) { + throw new error_1.ValidationError('invalid bundle', invalidValues); + } +} +// Asserts that the given bundle conforms to the v0.1 bundle format. +function assertBundleV01(b) { + const invalidValues = []; + invalidValues.push(...validateBundleBase(b)); + invalidValues.push(...validateInclusionPromise(b)); + if (invalidValues.length > 0) { + throw new error_1.ValidationError('invalid v0.1 bundle', invalidValues); + } +} +// Type guard to determine if Bundle is a v0.1 bundle. +function isBundleV01(b) { + try { + assertBundleV01(b); + return true; + } + catch (e) { + return false; + } +} +// Asserts that the given bundle conforms to the v0.2 bundle format. +function assertBundleV02(b) { + const invalidValues = []; + invalidValues.push(...validateBundleBase(b)); + invalidValues.push(...validateInclusionProof(b)); + if (invalidValues.length > 0) { + throw new error_1.ValidationError('invalid v0.2 bundle', invalidValues); + } +} +// Asserts that the given bundle conforms to the newest (0.3) bundle format. +function assertBundleLatest(b) { + const invalidValues = []; + invalidValues.push(...validateBundleBase(b)); + invalidValues.push(...validateInclusionProof(b)); + invalidValues.push(...validateNoCertificateChain(b)); + if (invalidValues.length > 0) { + throw new error_1.ValidationError('invalid bundle', invalidValues); + } +} +function validateBundleBase(b) { + const invalidValues = []; + // Media type validation + if (b.mediaType === undefined || + (!b.mediaType.match(/^application\/vnd\.dev\.sigstore\.bundle\+json;version=\d\.\d/) && + !b.mediaType.match(/^application\/vnd\.dev\.sigstore\.bundle\.v\d\.\d\+json/))) { + invalidValues.push('mediaType'); + } + // Content-related validation + if (b.content === undefined) { + invalidValues.push('content'); + } + else { + switch (b.content.$case) { + case 'messageSignature': + if (b.content.messageSignature.messageDigest === undefined) { + invalidValues.push('content.messageSignature.messageDigest'); + } + else { + if (b.content.messageSignature.messageDigest.digest.length === 0) { + invalidValues.push('content.messageSignature.messageDigest.digest'); + } + } + if (b.content.messageSignature.signature.length === 0) { + invalidValues.push('content.messageSignature.signature'); + } + break; + case 'dsseEnvelope': + if (b.content.dsseEnvelope.payload.length === 0) { + invalidValues.push('content.dsseEnvelope.payload'); + } + if (b.content.dsseEnvelope.signatures.length !== 1) { + invalidValues.push('content.dsseEnvelope.signatures'); + } + else { + if (b.content.dsseEnvelope.signatures[0].sig.length === 0) { + invalidValues.push('content.dsseEnvelope.signatures[0].sig'); + } + } + break; + } + } + // Verification material-related validation + if (b.verificationMaterial === undefined) { + invalidValues.push('verificationMaterial'); + } + else { + if (b.verificationMaterial.content === undefined) { + invalidValues.push('verificationMaterial.content'); + } + else { + switch (b.verificationMaterial.content.$case) { + case 'x509CertificateChain': + if (b.verificationMaterial.content.x509CertificateChain.certificates + .length === 0) { + invalidValues.push('verificationMaterial.content.x509CertificateChain.certificates'); + } + b.verificationMaterial.content.x509CertificateChain.certificates.forEach((cert, i) => { + if (cert.rawBytes.length === 0) { + 
invalidValues.push(`verificationMaterial.content.x509CertificateChain.certificates[${i}].rawBytes`); + } + }); + break; + case 'certificate': + if (b.verificationMaterial.content.certificate.rawBytes.length === 0) { + invalidValues.push('verificationMaterial.content.certificate.rawBytes'); + } + break; + } + } + if (b.verificationMaterial.tlogEntries === undefined) { + invalidValues.push('verificationMaterial.tlogEntries'); + } + else { + if (b.verificationMaterial.tlogEntries.length > 0) { + b.verificationMaterial.tlogEntries.forEach((entry, i) => { + if (entry.logId === undefined) { + invalidValues.push(`verificationMaterial.tlogEntries[${i}].logId`); + } + if (entry.kindVersion === undefined) { + invalidValues.push(`verificationMaterial.tlogEntries[${i}].kindVersion`); + } + }); + } + } + } + return invalidValues; +} +// Necessary for V01 bundles +function validateInclusionPromise(b) { + const invalidValues = []; + if (b.verificationMaterial && + b.verificationMaterial.tlogEntries?.length > 0) { + b.verificationMaterial.tlogEntries.forEach((entry, i) => { + if (entry.inclusionPromise === undefined) { + invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionPromise`); + } + }); + } + return invalidValues; +} +// Necessary for V02 and later bundles +function validateInclusionProof(b) { + const invalidValues = []; + if (b.verificationMaterial && + b.verificationMaterial.tlogEntries?.length > 0) { + b.verificationMaterial.tlogEntries.forEach((entry, i) => { + if (entry.inclusionProof === undefined) { + invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionProof`); + } + else { + if (entry.inclusionProof.checkpoint === undefined) { + invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionProof.checkpoint`); + } + } + }); + } + return invalidValues; +} +// Necessary for V03 and later bundles +function validateNoCertificateChain(b) { + const invalidValues = []; + /* istanbul ignore next */ + if (b.verificationMaterial?.content?.$case === 'x509CertificateChain') { + invalidValues.push('verificationMaterial.content.$case'); + } + return invalidValues; +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/package.json new file mode 100644 index 0000000000000..ee5d2b92b801a --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/package.json @@ -0,0 +1,35 @@ +{ + "name": "@sigstore/bundle", + "version": "3.0.0", + "description": "Sigstore bundle type", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "clean": "shx rm -rf dist *.tsbuildinfo", + "build": "tsc --build", + "test": "jest" + }, + "files": [ + "dist", + "store" + ], + "author": "bdehamer@github.com", + "license": "Apache-2.0", + "repository": { + "type": "git", + "url": "git+https://github.com/sigstore/sigstore-js.git" + }, + "bugs": { + "url": "https://github.com/sigstore/sigstore-js/issues" + }, + "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/bundle#readme", + "publishConfig": { + "provenance": true + }, + "dependencies": { + "@sigstore/protobuf-specs": "^0.3.2" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/LICENSE new file mode 100644 index 0000000000000..e9e7c1679a09d --- /dev/null +++ 
b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 The Sigstore Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/error.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/error.js new file mode 100644 index 0000000000000..17d93b0f7e706 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/error.js @@ -0,0 +1,24 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ASN1TypeError = exports.ASN1ParseError = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +class ASN1ParseError extends Error { +} +exports.ASN1ParseError = ASN1ParseError; +class ASN1TypeError extends Error { +} +exports.ASN1TypeError = ASN1TypeError; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/index.js new file mode 100644 index 0000000000000..348b2ea4022e5 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/index.js @@ -0,0 +1,20 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ASN1Obj = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +var obj_1 = require("./obj"); +Object.defineProperty(exports, "ASN1Obj", { enumerable: true, get: function () { return obj_1.ASN1Obj; } }); diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/length.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/length.js new file mode 100644 index 0000000000000..cb7ebf09dbefa --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/length.js @@ -0,0 +1,62 @@ +"use strict"; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.decodeLength = decodeLength; +exports.encodeLength = encodeLength; +const error_1 = require("./error"); +// Decodes the length of a DER-encoded ANS.1 element from the supplied stream. +// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-encoded-length-and-value-bytes +function decodeLength(stream) { + const buf = stream.getUint8(); + // If the most significant bit is UNSET the length is just the value of the + // byte. + if ((buf & 0x80) === 0x00) { + return buf; + } + // Otherwise, the lower 7 bits of the first byte indicate the number of bytes + // that follow to encode the length. + const byteCount = buf & 0x7f; + // Ensure the encoded length can safely fit in a JS number. + if (byteCount > 6) { + throw new error_1.ASN1ParseError('length exceeds 6 byte limit'); + } + // Iterate over the bytes that encode the length. + let len = 0; + for (let i = 0; i < byteCount; i++) { + len = len * 256 + stream.getUint8(); + } + // This is a valid ASN.1 length encoding, but we don't support it. + if (len === 0) { + throw new error_1.ASN1ParseError('indefinite length encoding not supported'); + } + return len; +} +// Translates the supplied value to a DER-encoded length. +function encodeLength(len) { + if (len < 128) { + return Buffer.from([len]); + } + // Bitwise operations on large numbers are not supported in JS, so we need to + // use BigInts. + let val = BigInt(len); + const bytes = []; + while (val > 0n) { + bytes.unshift(Number(val & 255n)); + val = val >> 8n; + } + return Buffer.from([0x80 | bytes.length, ...bytes]); +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/obj.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/obj.js new file mode 100644 index 0000000000000..5f9ac9cdbc493 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/obj.js @@ -0,0 +1,152 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ASN1Obj = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const stream_1 = require("../stream"); +const error_1 = require("./error"); +const length_1 = require("./length"); +const parse_1 = require("./parse"); +const tag_1 = require("./tag"); +class ASN1Obj { + constructor(tag, value, subs) { + this.tag = tag; + this.value = value; + this.subs = subs; + } + // Constructs an ASN.1 object from a Buffer of DER-encoded bytes. 
+ static parseBuffer(buf) { + return parseStream(new stream_1.ByteStream(buf)); + } + toDER() { + const valueStream = new stream_1.ByteStream(); + if (this.subs.length > 0) { + for (const sub of this.subs) { + valueStream.appendView(sub.toDER()); + } + } + else { + valueStream.appendView(this.value); + } + const value = valueStream.buffer; + // Concat tag/length/value + const obj = new stream_1.ByteStream(); + obj.appendChar(this.tag.toDER()); + obj.appendView((0, length_1.encodeLength)(value.length)); + obj.appendView(value); + return obj.buffer; + } + ///////////////////////////////////////////////////////////////////////////// + // Convenience methods for parsing ASN.1 primitives into JS types + // Returns the ASN.1 object's value as a boolean. Throws an error if the + // object is not a boolean. + toBoolean() { + if (!this.tag.isBoolean()) { + throw new error_1.ASN1TypeError('not a boolean'); + } + return (0, parse_1.parseBoolean)(this.value); + } + // Returns the ASN.1 object's value as a BigInt. Throws an error if the + // object is not an integer. + toInteger() { + if (!this.tag.isInteger()) { + throw new error_1.ASN1TypeError('not an integer'); + } + return (0, parse_1.parseInteger)(this.value); + } + // Returns the ASN.1 object's value as an OID string. Throws an error if the + // object is not an OID. + toOID() { + if (!this.tag.isOID()) { + throw new error_1.ASN1TypeError('not an OID'); + } + return (0, parse_1.parseOID)(this.value); + } + // Returns the ASN.1 object's value as a Date. Throws an error if the object + // is not either a UTCTime or a GeneralizedTime. + toDate() { + switch (true) { + case this.tag.isUTCTime(): + return (0, parse_1.parseTime)(this.value, true); + case this.tag.isGeneralizedTime(): + return (0, parse_1.parseTime)(this.value, false); + default: + throw new error_1.ASN1TypeError('not a date'); + } + } + // Returns the ASN.1 object's value as a number[] where each number is the + // value of a bit in the bit string. Throws an error if the object is not a + // bit string. + toBitString() { + if (!this.tag.isBitString()) { + throw new error_1.ASN1TypeError('not a bit string'); + } + return (0, parse_1.parseBitString)(this.value); + } +} +exports.ASN1Obj = ASN1Obj; +///////////////////////////////////////////////////////////////////////////// +// Internal stream parsing functions +function parseStream(stream) { + // Parse tag, length, and value from stream + const tag = new tag_1.ASN1Tag(stream.getUint8()); + const len = (0, length_1.decodeLength)(stream); + const value = stream.slice(stream.position, len); + const start = stream.position; + let subs = []; + // If the object is constructed, parse its children. Sometimes, children + // are embedded in OCTESTRING objects, so we need to check those + // for children as well. + if (tag.constructed) { + subs = collectSubs(stream, len); + } + else if (tag.isOctetString()) { + // Attempt to parse children of OCTETSTRING objects. If anything fails, + // assume the object is not constructed and treat as primitive. + try { + subs = collectSubs(stream, len); + } + catch (e) { + // Fail silently and treat as primitive + } + } + // If there are no children, move stream cursor to the end of the object + if (subs.length === 0) { + stream.seek(start + len); + } + return new ASN1Obj(tag, value, subs); +} +function collectSubs(stream, len) { + // Calculate end of object content + const end = stream.position + len; + // Make sure there are enough bytes left in the stream. 
This should never + // happen, cause it'll get caught when the stream is sliced in parseStream. + // Leaving as an extra check just in case. + /* istanbul ignore if */ + if (end > stream.length) { + throw new error_1.ASN1ParseError('invalid length'); + } + // Parse all children + const subs = []; + while (stream.position < end) { + subs.push(parseStream(stream)); + } + // When we're done parsing children, we should be at the end of the object + if (stream.position !== end) { + throw new error_1.ASN1ParseError('invalid length'); + } + return subs; +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/parse.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/parse.js new file mode 100644 index 0000000000000..7fbb42632c60e --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/parse.js @@ -0,0 +1,124 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseInteger = parseInteger; +exports.parseStringASCII = parseStringASCII; +exports.parseTime = parseTime; +exports.parseOID = parseOID; +exports.parseBoolean = parseBoolean; +exports.parseBitString = parseBitString; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const RE_TIME_SHORT_YEAR = /^(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\.\d{3})?Z$/; +const RE_TIME_LONG_YEAR = /^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\.\d{3})?Z$/; +// Parse a BigInt from the DER-encoded buffer +// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-integer +function parseInteger(buf) { + let pos = 0; + const end = buf.length; + let val = buf[pos]; + const neg = val > 0x7f; + // Consume any padding bytes + const pad = neg ? 0xff : 0x00; + while (val == pad && ++pos < end) { + val = buf[pos]; + } + // Calculate remaining bytes to read + const len = end - pos; + if (len === 0) + return BigInt(neg ? -1 : 0); + // Handle two's complement for negative numbers + val = neg ? val - 256 : val; + // Parse remaining bytes + let n = BigInt(val); + for (let i = pos + 1; i < end; ++i) { + n = n * BigInt(256) + BigInt(buf[i]); + } + return n; +} +// Parse an ASCII string from the DER-encoded buffer +// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-basic-types#boolean +function parseStringASCII(buf) { + return buf.toString('ascii'); +} +// Parse a Date from the DER-encoded buffer +// https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.5.1 +function parseTime(buf, shortYear) { + const timeStr = parseStringASCII(buf); + // Parse the time string into matches - captured groups start at index 1 + const m = shortYear + ? RE_TIME_SHORT_YEAR.exec(timeStr) + : RE_TIME_LONG_YEAR.exec(timeStr); + if (!m) { + throw new Error('invalid time'); + } + // Translate dates with a 2-digit year to 4 digits per the spec + if (shortYear) { + let year = Number(m[1]); + year += year >= 50 ? 
1900 : 2000; + m[1] = year.toString(); + } + // Translate to ISO8601 format and parse + return new Date(`${m[1]}-${m[2]}-${m[3]}T${m[4]}:${m[5]}:${m[6]}Z`); +} +// Parse an OID from the DER-encoded buffer +// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-object-identifier +function parseOID(buf) { + let pos = 0; + const end = buf.length; + // Consume first byte which encodes the first two OID components + let n = buf[pos++]; + const first = Math.floor(n / 40); + const second = n % 40; + let oid = `${first}.${second}`; + // Consume remaining bytes + let val = 0; + for (; pos < end; ++pos) { + n = buf[pos]; + val = (val << 7) + (n & 0x7f); + // If the left-most bit is NOT set, then this is the last byte in the + // sequence and we can add the value to the OID and reset the accumulator + if ((n & 0x80) === 0) { + oid += `.${val}`; + val = 0; + } + } + return oid; +} +// Parse a boolean from the DER-encoded buffer +// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-basic-types#boolean +function parseBoolean(buf) { + return buf[0] !== 0; +} +// Parse a bit string from the DER-encoded buffer +// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-bit-string +function parseBitString(buf) { + // First byte tell us how many unused bits are in the last byte + const unused = buf[0]; + const start = 1; + const end = buf.length; + const bits = []; + for (let i = start; i < end; ++i) { + const byte = buf[i]; + // The skip value is only used for the last byte + const skip = i === end - 1 ? unused : 0; + // Iterate over each bit in the byte (most significant first) + for (let j = 7; j >= skip; --j) { + // Read the bit and add it to the bit string + bits.push((byte >> j) & 0x01); + } + } + return bits; +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/tag.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/tag.js new file mode 100644 index 0000000000000..84dd938d049aa --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/tag.js @@ -0,0 +1,86 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ASN1Tag = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const error_1 = require("./error"); +const UNIVERSAL_TAG = { + BOOLEAN: 0x01, + INTEGER: 0x02, + BIT_STRING: 0x03, + OCTET_STRING: 0x04, + OBJECT_IDENTIFIER: 0x06, + SEQUENCE: 0x10, + SET: 0x11, + PRINTABLE_STRING: 0x13, + UTC_TIME: 0x17, + GENERALIZED_TIME: 0x18, +}; +const TAG_CLASS = { + UNIVERSAL: 0x00, + APPLICATION: 0x01, + CONTEXT_SPECIFIC: 0x02, + PRIVATE: 0x03, +}; +// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-encoded-tag-bytes +class ASN1Tag { + constructor(enc) { + // Bits 0 through 4 are the tag number + this.number = enc & 0x1f; + // Bit 5 is the constructed bit + this.constructed = (enc & 0x20) === 0x20; + // Bit 6 & 7 are the class + this.class = enc >> 6; + if (this.number === 0x1f) { + throw new error_1.ASN1ParseError('long form tags not supported'); + } + if (this.class === TAG_CLASS.UNIVERSAL && this.number === 0x00) { + throw new error_1.ASN1ParseError('unsupported tag 0x00'); + } + } + isUniversal() { + return this.class === TAG_CLASS.UNIVERSAL; + } + isContextSpecific(num) { + const res = this.class === TAG_CLASS.CONTEXT_SPECIFIC; + return num !== undefined ? res && this.number === num : res; + } + isBoolean() { + return this.isUniversal() && this.number === UNIVERSAL_TAG.BOOLEAN; + } + isInteger() { + return this.isUniversal() && this.number === UNIVERSAL_TAG.INTEGER; + } + isBitString() { + return this.isUniversal() && this.number === UNIVERSAL_TAG.BIT_STRING; + } + isOctetString() { + return this.isUniversal() && this.number === UNIVERSAL_TAG.OCTET_STRING; + } + isOID() { + return (this.isUniversal() && this.number === UNIVERSAL_TAG.OBJECT_IDENTIFIER); + } + isUTCTime() { + return this.isUniversal() && this.number === UNIVERSAL_TAG.UTC_TIME; + } + isGeneralizedTime() { + return this.isUniversal() && this.number === UNIVERSAL_TAG.GENERALIZED_TIME; + } + toDER() { + return this.number | (this.constructed ? 0x20 : 0x00) | (this.class << 6); + } +} +exports.ASN1Tag = ASN1Tag; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/crypto.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/crypto.js new file mode 100644 index 0000000000000..296b5ba43e86a --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/crypto.js @@ -0,0 +1,60 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createPublicKey = createPublicKey; +exports.digest = digest; +exports.verify = verify; +exports.bufferEqual = bufferEqual; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const crypto_1 = __importDefault(require("crypto")); +function createPublicKey(key, type = 'spki') { + if (typeof key === 'string') { + return crypto_1.default.createPublicKey(key); + } + else { + return crypto_1.default.createPublicKey({ key, format: 'der', type: type }); + } +} +function digest(algorithm, ...data) { + const hash = crypto_1.default.createHash(algorithm); + for (const d of data) { + hash.update(d); + } + return hash.digest(); +} +function verify(data, key, signature, algorithm) { + // The try/catch is to work around an issue in Node 14.x where verify throws + // an error in some scenarios if the signature is invalid. + try { + return crypto_1.default.verify(algorithm, data, key, signature); + } + catch (e) { + /* istanbul ignore next */ + return false; + } +} +function bufferEqual(a, b) { + try { + return crypto_1.default.timingSafeEqual(a, b); + } + catch { + /* istanbul ignore next */ + return false; + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/dsse.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/dsse.js new file mode 100644 index 0000000000000..ca7b63630e2ba --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/dsse.js @@ -0,0 +1,30 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.preAuthEncoding = preAuthEncoding; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const PAE_PREFIX = 'DSSEv1'; +// DSSE Pre-Authentication Encoding +function preAuthEncoding(payloadType, payload) { + const prefix = [ + PAE_PREFIX, + payloadType.length, + payloadType, + payload.length, + '', + ].join(' '); + return Buffer.concat([Buffer.from(prefix, 'ascii'), payload]); +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/encoding.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/encoding.js new file mode 100644 index 0000000000000..7113af66db4c2 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/encoding.js @@ -0,0 +1,27 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.base64Encode = base64Encode; +exports.base64Decode = base64Decode; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const BASE64_ENCODING = 'base64'; +const UTF8_ENCODING = 'utf-8'; +function base64Encode(str) { + return Buffer.from(str, UTF8_ENCODING).toString(BASE64_ENCODING); +} +function base64Decode(str) { + return Buffer.from(str, BASE64_ENCODING).toString(UTF8_ENCODING); +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/index.js new file mode 100644 index 0000000000000..ac35e86a8df7d --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/index.js @@ -0,0 +1,56 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.X509SCTExtension = exports.X509Certificate = exports.EXTENSION_OID_SCT = exports.ByteStream = exports.RFC3161Timestamp = exports.pem = exports.json = exports.encoding = exports.dsse = exports.crypto = exports.ASN1Obj = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +var asn1_1 = require("./asn1"); +Object.defineProperty(exports, "ASN1Obj", { enumerable: true, get: function () { return asn1_1.ASN1Obj; } }); +exports.crypto = __importStar(require("./crypto")); +exports.dsse = __importStar(require("./dsse")); +exports.encoding = __importStar(require("./encoding")); +exports.json = __importStar(require("./json")); +exports.pem = __importStar(require("./pem")); +var rfc3161_1 = require("./rfc3161"); +Object.defineProperty(exports, "RFC3161Timestamp", { enumerable: true, get: function () { return rfc3161_1.RFC3161Timestamp; } }); +var stream_1 = require("./stream"); +Object.defineProperty(exports, "ByteStream", { enumerable: true, get: function () { return stream_1.ByteStream; } }); +var x509_1 = require("./x509"); +Object.defineProperty(exports, "EXTENSION_OID_SCT", { enumerable: true, get: function () { return x509_1.EXTENSION_OID_SCT; } }); +Object.defineProperty(exports, "X509Certificate", { enumerable: true, get: function () { return x509_1.X509Certificate; } }); +Object.defineProperty(exports, "X509SCTExtension", { enumerable: true, get: function () { return x509_1.X509SCTExtension; } }); diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/json.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/json.js new file mode 100644 index 0000000000000..7808d033b98cc --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/json.js @@ -0,0 +1,60 @@ +"use strict"; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.canonicalize = canonicalize; +// JSON canonicalization per https://github.com/cyberphone/json-canonicalization +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function canonicalize(object) { + let buffer = ''; + if (object === null || typeof object !== 'object' || object.toJSON != null) { + // Primitives or toJSONable objects + buffer += JSON.stringify(object); + } + else if (Array.isArray(object)) { + // Array - maintain element order + buffer += '['; + let first = true; + object.forEach((element) => { + if (!first) { + buffer += ','; + } + first = false; + // recursive call + buffer += canonicalize(element); + }); + buffer += ']'; + } + else { + // Object - Sort properties before serializing + buffer += '{'; + let first = true; + Object.keys(object) + .sort() + .forEach((property) => { + if (!first) { + buffer += ','; + } + first = false; + buffer += JSON.stringify(property); + buffer += ':'; + // recursive call + buffer += canonicalize(object[property]); + }); + buffer += '}'; + } + return buffer; +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/oid.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/oid.js new file mode 100644 index 0000000000000..ac7a643067ad0 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/oid.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SHA2_HASH_ALGOS = exports.ECDSA_SIGNATURE_ALGOS = void 0; +exports.ECDSA_SIGNATURE_ALGOS = { + '1.2.840.10045.4.3.1': 'sha224', + '1.2.840.10045.4.3.2': 'sha256', + '1.2.840.10045.4.3.3': 'sha384', + '1.2.840.10045.4.3.4': 'sha512', +}; +exports.SHA2_HASH_ALGOS = { + '2.16.840.1.101.3.4.2.1': 'sha256', + '2.16.840.1.101.3.4.2.2': 'sha384', + '2.16.840.1.101.3.4.2.3': 'sha512', +}; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/pem.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/pem.js new file mode 100644 index 0000000000000..f1241d28d586e --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/pem.js @@ -0,0 +1,43 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toDER = toDER; +exports.fromDER = fromDER; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const PEM_HEADER = /-----BEGIN (.*)-----/; +const PEM_FOOTER = /-----END (.*)-----/; +function toDER(certificate) { + let der = ''; + certificate.split('\n').forEach((line) => { + if (line.match(PEM_HEADER) || line.match(PEM_FOOTER)) { + return; + } + der += line; + }); + return Buffer.from(der, 'base64'); +} +// Translates a DER-encoded buffer into a PEM-encoded string. Standard PEM +// encoding dictates that each certificate should have a trailing newline after +// the footer. 
+function fromDER(certificate, type = 'CERTIFICATE') { + // Base64-encode the certificate. + const der = certificate.toString('base64'); + // Split the certificate into lines of 64 characters. + const lines = der.match(/.{1,64}/g) || ''; + return [`-----BEGIN ${type}-----`, ...lines, `-----END ${type}-----`] + .join('\n') + .concat('\n'); +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/rfc3161/error.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/rfc3161/error.js new file mode 100644 index 0000000000000..b9b549b0bb323 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/rfc3161/error.js @@ -0,0 +1,21 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.RFC3161TimestampVerificationError = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +class RFC3161TimestampVerificationError extends Error { +} +exports.RFC3161TimestampVerificationError = RFC3161TimestampVerificationError; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/rfc3161/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/rfc3161/index.js new file mode 100644 index 0000000000000..b77ecf1c7d50c --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/rfc3161/index.js @@ -0,0 +1,20 @@ +"use strict"; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.RFC3161Timestamp = void 0; +var timestamp_1 = require("./timestamp"); +Object.defineProperty(exports, "RFC3161Timestamp", { enumerable: true, get: function () { return timestamp_1.RFC3161Timestamp; } }); diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/rfc3161/timestamp.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/rfc3161/timestamp.js new file mode 100644 index 0000000000000..3e61fc1a4e169 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/rfc3161/timestamp.js @@ -0,0 +1,201 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.RFC3161Timestamp = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const asn1_1 = require("../asn1"); +const crypto = __importStar(require("../crypto")); +const oid_1 = require("../oid"); +const error_1 = require("./error"); +const tstinfo_1 = require("./tstinfo"); +const OID_PKCS9_CONTENT_TYPE_SIGNED_DATA = '1.2.840.113549.1.7.2'; +const OID_PKCS9_CONTENT_TYPE_TSTINFO = '1.2.840.113549.1.9.16.1.4'; +const OID_PKCS9_MESSAGE_DIGEST_KEY = '1.2.840.113549.1.9.4'; +class RFC3161Timestamp { + constructor(asn1) { + this.root = asn1; + } + static parse(der) { + const asn1 = asn1_1.ASN1Obj.parseBuffer(der); + return new RFC3161Timestamp(asn1); + } + get status() { + return this.pkiStatusInfoObj.subs[0].toInteger(); + } + get contentType() { + return this.contentTypeObj.toOID(); + } + get eContentType() { + return this.eContentTypeObj.toOID(); + } + get signingTime() { + return this.tstInfo.genTime; + } + get signerIssuer() { + return this.signerSidObj.subs[0].value; + } + get signerSerialNumber() { + return this.signerSidObj.subs[1].value; + } + get signerDigestAlgorithm() { + const oid = this.signerDigestAlgorithmObj.subs[0].toOID(); + return oid_1.SHA2_HASH_ALGOS[oid]; + } + get signatureAlgorithm() { + const oid = this.signatureAlgorithmObj.subs[0].toOID(); + return oid_1.ECDSA_SIGNATURE_ALGOS[oid]; + } + get signatureValue() { + return this.signatureValueObj.value; + } + get tstInfo() { + // Need to unpack tstInfo from an OCTET STRING + return new tstinfo_1.TSTInfo(this.eContentObj.subs[0].subs[0]); + } + verify(data, publicKey) { + if (!this.timeStampTokenObj) { + throw new error_1.RFC3161TimestampVerificationError('timeStampToken is missing'); + } + // Check for expected ContentInfo content type + if (this.contentType !== OID_PKCS9_CONTENT_TYPE_SIGNED_DATA) { + throw new error_1.RFC3161TimestampVerificationError(`incorrect content type: ${this.contentType}`); + } + // Check for expected encapsulated content type + if (this.eContentType !== OID_PKCS9_CONTENT_TYPE_TSTINFO) { + throw new error_1.RFC3161TimestampVerificationError(`incorrect encapsulated content type: ${this.eContentType}`); + } + // Check that the 
tstInfo references the correct artifact + this.tstInfo.verify(data); + // Check that the signed message digest matches the tstInfo + this.verifyMessageDigest(); + // Check that the signature is valid for the signed attributes + this.verifySignature(publicKey); + } + verifyMessageDigest() { + // Check that the tstInfo matches the signed data + const tstInfoDigest = crypto.digest(this.signerDigestAlgorithm, this.tstInfo.raw); + const expectedDigest = this.messageDigestAttributeObj.subs[1].subs[0].value; + if (!crypto.bufferEqual(tstInfoDigest, expectedDigest)) { + throw new error_1.RFC3161TimestampVerificationError('signed data does not match tstInfo'); + } + } + verifySignature(key) { + // Encode the signed attributes for verification + const signedAttrs = this.signedAttrsObj.toDER(); + signedAttrs[0] = 0x31; // Change context-specific tag to SET + // Check that the signature is valid for the signed attributes + const verified = crypto.verify(signedAttrs, key, this.signatureValue, this.signatureAlgorithm); + if (!verified) { + throw new error_1.RFC3161TimestampVerificationError('signature verification failed'); + } + } + // https://www.rfc-editor.org/rfc/rfc3161#section-2.4.2 + get pkiStatusInfoObj() { + // pkiStatusInfo is the first element of the timestamp response sequence + return this.root.subs[0]; + } + // https://www.rfc-editor.org/rfc/rfc3161#section-2.4.2 + get timeStampTokenObj() { + // timeStampToken is the first element of the timestamp response sequence + return this.root.subs[1]; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-3 + get contentTypeObj() { + return this.timeStampTokenObj.subs[0]; + } + // https://www.rfc-editor.org/rfc/rfc5652#section-3 + get signedDataObj() { + const obj = this.timeStampTokenObj.subs.find((sub) => sub.tag.isContextSpecific(0x00)); + return obj.subs[0]; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-5.1 + get encapContentInfoObj() { + return this.signedDataObj.subs[2]; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-5.1 + get signerInfosObj() { + // SignerInfos is the last element of the signed data sequence + const sd = this.signedDataObj; + return sd.subs[sd.subs.length - 1]; + } + // https://www.rfc-editor.org/rfc/rfc5652#section-5.1 + get signerInfoObj() { + // Only supporting one signer + return this.signerInfosObj.subs[0]; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-5.2 + get eContentTypeObj() { + return this.encapContentInfoObj.subs[0]; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-5.2 + get eContentObj() { + return this.encapContentInfoObj.subs[1]; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3 + get signedAttrsObj() { + const signedAttrs = this.signerInfoObj.subs.find((sub) => sub.tag.isContextSpecific(0x00)); + return signedAttrs; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3 + get messageDigestAttributeObj() { + const messageDigest = this.signedAttrsObj.subs.find((sub) => sub.subs[0].tag.isOID() && + sub.subs[0].toOID() === OID_PKCS9_MESSAGE_DIGEST_KEY); + return messageDigest; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3 + get signerSidObj() { + return this.signerInfoObj.subs[1]; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3 + get signerDigestAlgorithmObj() { + // Signature is the 2nd element of the signerInfoObj object + return this.signerInfoObj.subs[2]; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3 + get signatureAlgorithmObj() { + // Signature is 
the 4th element of the signerInfoObj object + return this.signerInfoObj.subs[4]; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3 + get signatureValueObj() { + // Signature is the 6th element of the signerInfoObj object + return this.signerInfoObj.subs[5]; + } +} +exports.RFC3161Timestamp = RFC3161Timestamp; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js new file mode 100644 index 0000000000000..dc8e4fb339383 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js @@ -0,0 +1,61 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TSTInfo = void 0; +const crypto = __importStar(require("../crypto")); +const oid_1 = require("../oid"); +const error_1 = require("./error"); +class TSTInfo { + constructor(asn1) { + this.root = asn1; + } + get version() { + return this.root.subs[0].toInteger(); + } + get genTime() { + return this.root.subs[4].toDate(); + } + get messageImprintHashAlgorithm() { + const oid = this.messageImprintObj.subs[0].subs[0].toOID(); + return oid_1.SHA2_HASH_ALGOS[oid]; + } + get messageImprintHashedMessage() { + return this.messageImprintObj.subs[1].value; + } + get raw() { + return this.root.toDER(); + } + verify(data) { + const digest = crypto.digest(this.messageImprintHashAlgorithm, data); + if (!crypto.bufferEqual(digest, this.messageImprintHashedMessage)) { + throw new error_1.RFC3161TimestampVerificationError('message imprint does not match artifact'); + } + } + // https://www.rfc-editor.org/rfc/rfc3161#section-2.4.2 + get messageImprintObj() { + return this.root.subs[2]; + } +} +exports.TSTInfo = TSTInfo; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/stream.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/stream.js new file mode 100644 index 0000000000000..0a24f8582eb23 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/stream.js @@ -0,0 +1,115 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ByteStream = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +class StreamError extends Error { +} +class ByteStream { + constructor(buffer) { + this.start = 0; + if (buffer) { + this.buf = buffer; + this.view = Buffer.from(buffer); + } + else { + this.buf = new ArrayBuffer(0); + this.view = Buffer.from(this.buf); + } + } + get buffer() { + return this.view.subarray(0, this.start); + } + get length() { + return this.view.byteLength; + } + get position() { + return this.start; + } + seek(position) { + this.start = position; + } + // Returns a Buffer containing the specified number of bytes starting at the + // given start position. + slice(start, len) { + const end = start + len; + if (end > this.length) { + throw new StreamError('request past end of buffer'); + } + return this.view.subarray(start, end); + } + appendChar(char) { + this.ensureCapacity(1); + this.view[this.start] = char; + this.start += 1; + } + appendUint16(num) { + this.ensureCapacity(2); + const value = new Uint16Array([num]); + const view = new Uint8Array(value.buffer); + this.view[this.start] = view[1]; + this.view[this.start + 1] = view[0]; + this.start += 2; + } + appendUint24(num) { + this.ensureCapacity(3); + const value = new Uint32Array([num]); + const view = new Uint8Array(value.buffer); + this.view[this.start] = view[2]; + this.view[this.start + 1] = view[1]; + this.view[this.start + 2] = view[0]; + this.start += 3; + } + appendView(view) { + this.ensureCapacity(view.length); + this.view.set(view, this.start); + this.start += view.length; + } + getBlock(size) { + if (size <= 0) { + return Buffer.alloc(0); + } + if (this.start + size > this.view.length) { + throw new Error('request past end of buffer'); + } + const result = this.view.subarray(this.start, this.start + size); + this.start += size; + return result; + } + getUint8() { + return this.getBlock(1)[0]; + } + getUint16() { + const block = this.getBlock(2); + return (block[0] << 8) | block[1]; + } + ensureCapacity(size) { + if (this.start + size > this.view.byteLength) { + const blockSize = ByteStream.BLOCK_SIZE + (size > ByteStream.BLOCK_SIZE ? size : 0); + this.realloc(this.view.byteLength + blockSize); + } + } + realloc(size) { + const newArray = new ArrayBuffer(size); + const newView = Buffer.from(newArray); + // Copy the old buffer into the new one + newView.set(this.view); + this.buf = newArray; + this.view = newView; + } +} +exports.ByteStream = ByteStream; +ByteStream.BLOCK_SIZE = 1024; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/x509/cert.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/x509/cert.js new file mode 100644 index 0000000000000..72ea8e0738bc8 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/x509/cert.js @@ -0,0 +1,230 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.X509Certificate = exports.EXTENSION_OID_SCT = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const asn1_1 = require("../asn1"); +const crypto = __importStar(require("../crypto")); +const oid_1 = require("../oid"); +const pem = __importStar(require("../pem")); +const ext_1 = require("./ext"); +const EXTENSION_OID_SUBJECT_KEY_ID = '2.5.29.14'; +const EXTENSION_OID_KEY_USAGE = '2.5.29.15'; +const EXTENSION_OID_SUBJECT_ALT_NAME = '2.5.29.17'; +const EXTENSION_OID_BASIC_CONSTRAINTS = '2.5.29.19'; +const EXTENSION_OID_AUTHORITY_KEY_ID = '2.5.29.35'; +exports.EXTENSION_OID_SCT = '1.3.6.1.4.1.11129.2.4.2'; +class X509Certificate { + constructor(asn1) { + this.root = asn1; + } + static parse(cert) { + const der = typeof cert === 'string' ? 
pem.toDER(cert) : cert; + const asn1 = asn1_1.ASN1Obj.parseBuffer(der); + return new X509Certificate(asn1); + } + get tbsCertificate() { + return this.tbsCertificateObj; + } + get version() { + // version number is the first element of the version context specific tag + const ver = this.versionObj.subs[0].toInteger(); + return `v${(ver + BigInt(1)).toString()}`; + } + get serialNumber() { + return this.serialNumberObj.value; + } + get notBefore() { + // notBefore is the first element of the validity sequence + return this.validityObj.subs[0].toDate(); + } + get notAfter() { + // notAfter is the second element of the validity sequence + return this.validityObj.subs[1].toDate(); + } + get issuer() { + return this.issuerObj.value; + } + get subject() { + return this.subjectObj.value; + } + get publicKey() { + return this.subjectPublicKeyInfoObj.toDER(); + } + get signatureAlgorithm() { + const oid = this.signatureAlgorithmObj.subs[0].toOID(); + return oid_1.ECDSA_SIGNATURE_ALGOS[oid]; + } + get signatureValue() { + // Signature value is a bit string, so we need to skip the first byte + return this.signatureValueObj.value.subarray(1); + } + get subjectAltName() { + const ext = this.extSubjectAltName; + return ext?.uri || /* istanbul ignore next */ ext?.rfc822Name; + } + get extensions() { + // The extension list is the first (and only) element of the extensions + // context specific tag + /* istanbul ignore next */ + const extSeq = this.extensionsObj?.subs[0]; + /* istanbul ignore next */ + return extSeq?.subs || []; + } + get extKeyUsage() { + const ext = this.findExtension(EXTENSION_OID_KEY_USAGE); + return ext ? new ext_1.X509KeyUsageExtension(ext) : undefined; + } + get extBasicConstraints() { + const ext = this.findExtension(EXTENSION_OID_BASIC_CONSTRAINTS); + return ext ? new ext_1.X509BasicConstraintsExtension(ext) : undefined; + } + get extSubjectAltName() { + const ext = this.findExtension(EXTENSION_OID_SUBJECT_ALT_NAME); + return ext ? new ext_1.X509SubjectAlternativeNameExtension(ext) : undefined; + } + get extAuthorityKeyID() { + const ext = this.findExtension(EXTENSION_OID_AUTHORITY_KEY_ID); + return ext ? new ext_1.X509AuthorityKeyIDExtension(ext) : undefined; + } + get extSubjectKeyID() { + const ext = this.findExtension(EXTENSION_OID_SUBJECT_KEY_ID); + return ext + ? new ext_1.X509SubjectKeyIDExtension(ext) + : /* istanbul ignore next */ undefined; + } + get extSCT() { + const ext = this.findExtension(exports.EXTENSION_OID_SCT); + return ext ? new ext_1.X509SCTExtension(ext) : undefined; + } + get isCA() { + const ca = this.extBasicConstraints?.isCA || false; + // If the KeyUsage extension is present, keyCertSign must be set + if (this.extKeyUsage) { + return ca && this.extKeyUsage.keyCertSign; + } + // TODO: test coverage for this case + /* istanbul ignore next */ + return ca; + } + extension(oid) { + const ext = this.findExtension(oid); + return ext ? 
new ext_1.X509Extension(ext) : undefined; + } + verify(issuerCertificate) { + // Use the issuer's public key if provided, otherwise use the subject's + const publicKey = issuerCertificate?.publicKey || this.publicKey; + const key = crypto.createPublicKey(publicKey); + return crypto.verify(this.tbsCertificate.toDER(), key, this.signatureValue, this.signatureAlgorithm); + } + validForDate(date) { + return this.notBefore <= date && date <= this.notAfter; + } + equals(other) { + return this.root.toDER().equals(other.root.toDER()); + } + // Creates a copy of the certificate with a new buffer + clone() { + const der = this.root.toDER(); + const clone = Buffer.alloc(der.length); + der.copy(clone); + return X509Certificate.parse(clone); + } + findExtension(oid) { + // Find the extension with the given OID. The OID will always be the first + // element of the extension sequence + return this.extensions.find((ext) => ext.subs[0].toOID() === oid); + } + ///////////////////////////////////////////////////////////////////////////// + // The following properties use the documented x509 structure to locate the + // desired ASN.1 object + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1 + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.1.1 + get tbsCertificateObj() { + // tbsCertificate is the first element of the certificate sequence + return this.root.subs[0]; + } + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.1.2 + get signatureAlgorithmObj() { + // signatureAlgorithm is the second element of the certificate sequence + return this.root.subs[1]; + } + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.1.3 + get signatureValueObj() { + // signatureValue is the third element of the certificate sequence + return this.root.subs[2]; + } + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.1 + get versionObj() { + // version is the first element of the tbsCertificate sequence + return this.tbsCertificateObj.subs[0]; + } + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.2 + get serialNumberObj() { + // serialNumber is the second element of the tbsCertificate sequence + return this.tbsCertificateObj.subs[1]; + } + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.4 + get issuerObj() { + // issuer is the fourth element of the tbsCertificate sequence + return this.tbsCertificateObj.subs[3]; + } + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.5 + get validityObj() { + // version is the fifth element of the tbsCertificate sequence + return this.tbsCertificateObj.subs[4]; + } + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.6 + get subjectObj() { + // subject is the sixth element of the tbsCertificate sequence + return this.tbsCertificateObj.subs[5]; + } + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.7 + get subjectPublicKeyInfoObj() { + // subjectPublicKeyInfo is the seventh element of the tbsCertificate sequence + return this.tbsCertificateObj.subs[6]; + } + // Extensions can't be located by index because their position varies. 
Instead, + // we need to find the extensions context specific tag + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.9 + get extensionsObj() { + return this.tbsCertificateObj.subs.find((sub) => sub.tag.isContextSpecific(0x03)); + } +} +exports.X509Certificate = X509Certificate; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/x509/ext.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/x509/ext.js new file mode 100644 index 0000000000000..1d481261b0aa6 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/x509/ext.js @@ -0,0 +1,145 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.X509SCTExtension = exports.X509SubjectKeyIDExtension = exports.X509AuthorityKeyIDExtension = exports.X509SubjectAlternativeNameExtension = exports.X509KeyUsageExtension = exports.X509BasicConstraintsExtension = exports.X509Extension = void 0; +const stream_1 = require("../stream"); +const sct_1 = require("./sct"); +// https://www.rfc-editor.org/rfc/rfc5280#section-4.1 +class X509Extension { + constructor(asn1) { + this.root = asn1; + } + get oid() { + return this.root.subs[0].toOID(); + } + get critical() { + // The critical field is optional and will be the second element of the + // extension sequence if present. Default to false if not present. + return this.root.subs.length === 3 ? this.root.subs[1].toBoolean() : false; + } + get value() { + return this.extnValueObj.value; + } + get valueObj() { + return this.extnValueObj; + } + get extnValueObj() { + // The extnValue field will be the last element of the extension sequence + return this.root.subs[this.root.subs.length - 1]; + } +} +exports.X509Extension = X509Extension; +// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.9 +class X509BasicConstraintsExtension extends X509Extension { + get isCA() { + return this.sequence.subs[0]?.toBoolean() ?? false; + } + get pathLenConstraint() { + return this.sequence.subs.length > 1 + ? this.sequence.subs[1].toInteger() + : undefined; + } + // The extnValue field contains a single sequence wrapping the isCA and + // pathLenConstraint. + get sequence() { + return this.extnValueObj.subs[0]; + } +} +exports.X509BasicConstraintsExtension = X509BasicConstraintsExtension; +// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.3 +class X509KeyUsageExtension extends X509Extension { + get digitalSignature() { + return this.bitString[0] === 1; + } + get keyCertSign() { + return this.bitString[5] === 1; + } + get crlSign() { + return this.bitString[6] === 1; + } + // The extnValue field contains a single bit string which is a bit mask + // indicating which key usages are enabled. + get bitString() { + return this.extnValueObj.subs[0].toBitString(); + } +} +exports.X509KeyUsageExtension = X509KeyUsageExtension; +// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.6 +class X509SubjectAlternativeNameExtension extends X509Extension { + get rfc822Name() { + return this.findGeneralName(0x01)?.value.toString('ascii'); + } + get uri() { + return this.findGeneralName(0x06)?.value.toString('ascii'); + } + // Retrieve the value of an otherName with the given OID. + otherName(oid) { + const otherName = this.findGeneralName(0x00); + if (otherName === undefined) { + return undefined; + } + // The otherName is a sequence containing an OID and a value. + // Need to check that the OID matches the one we're looking for. 
+ const otherNameOID = otherName.subs[0].toOID(); + if (otherNameOID !== oid) { + return undefined; + } + // The otherNameValue is a sequence containing the actual value. + const otherNameValue = otherName.subs[1]; + return otherNameValue.subs[0].value.toString('ascii'); + } + findGeneralName(tag) { + return this.generalNames.find((gn) => gn.tag.isContextSpecific(tag)); + } + // The extnValue field contains a sequence of GeneralNames. + get generalNames() { + return this.extnValueObj.subs[0].subs; + } +} +exports.X509SubjectAlternativeNameExtension = X509SubjectAlternativeNameExtension; +// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.1 +class X509AuthorityKeyIDExtension extends X509Extension { + get keyIdentifier() { + return this.findSequenceMember(0x00)?.value; + } + findSequenceMember(tag) { + return this.sequence.subs.find((el) => el.tag.isContextSpecific(tag)); + } + // The extnValue field contains a single sequence wrapping the keyIdentifier + get sequence() { + return this.extnValueObj.subs[0]; + } +} +exports.X509AuthorityKeyIDExtension = X509AuthorityKeyIDExtension; +// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.2 +class X509SubjectKeyIDExtension extends X509Extension { + get keyIdentifier() { + return this.extnValueObj.subs[0].value; + } +} +exports.X509SubjectKeyIDExtension = X509SubjectKeyIDExtension; +// https://www.rfc-editor.org/rfc/rfc6962#section-3.3 +class X509SCTExtension extends X509Extension { + constructor(asn1) { + super(asn1); + } + get signedCertificateTimestamps() { + const buf = this.extnValueObj.subs[0].value; + const stream = new stream_1.ByteStream(buf); + // The overall list length is encoded in the first two bytes -- note this + // is the length of the list in bytes, NOT the number of SCTs in the list + const end = stream.getUint16() + 2; + const sctList = []; + while (stream.position < end) { + // Read the length of the next SCT + const sctLength = stream.getUint16(); + // Slice out the bytes for the next SCT and parse it + const sct = stream.getBlock(sctLength); + sctList.push(sct_1.SignedCertificateTimestamp.parse(sct)); + } + if (stream.position !== end) { + throw new Error('SCT list length does not match actual length'); + } + return sctList; + } +} +exports.X509SCTExtension = X509SCTExtension; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/x509/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/x509/index.js new file mode 100644 index 0000000000000..cdd77e58f37d5 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/x509/index.js @@ -0,0 +1,23 @@ +"use strict"; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.X509SCTExtension = exports.X509Certificate = exports.EXTENSION_OID_SCT = void 0; +var cert_1 = require("./cert"); +Object.defineProperty(exports, "EXTENSION_OID_SCT", { enumerable: true, get: function () { return cert_1.EXTENSION_OID_SCT; } }); +Object.defineProperty(exports, "X509Certificate", { enumerable: true, get: function () { return cert_1.X509Certificate; } }); +var ext_1 = require("./ext"); +Object.defineProperty(exports, "X509SCTExtension", { enumerable: true, get: function () { return ext_1.X509SCTExtension; } }); diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/x509/sct.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/x509/sct.js new file mode 100644 index 0000000000000..1603059c0d1ac --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/x509/sct.js @@ -0,0 +1,141 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SignedCertificateTimestamp = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const crypto = __importStar(require("../crypto")); +const stream_1 = require("../stream"); +class SignedCertificateTimestamp { + constructor(options) { + this.version = options.version; + this.logID = options.logID; + this.timestamp = options.timestamp; + this.extensions = options.extensions; + this.hashAlgorithm = options.hashAlgorithm; + this.signatureAlgorithm = options.signatureAlgorithm; + this.signature = options.signature; + } + get datetime() { + return new Date(Number(this.timestamp.readBigInt64BE())); + } + // Returns the hash algorithm used to generate the SCT's signature. 
+ // https://www.rfc-editor.org/rfc/rfc5246#section-7.4.1.4.1 + get algorithm() { + switch (this.hashAlgorithm) { + /* istanbul ignore next */ + case 0: + return 'none'; + /* istanbul ignore next */ + case 1: + return 'md5'; + /* istanbul ignore next */ + case 2: + return 'sha1'; + /* istanbul ignore next */ + case 3: + return 'sha224'; + case 4: + return 'sha256'; + /* istanbul ignore next */ + case 5: + return 'sha384'; + /* istanbul ignore next */ + case 6: + return 'sha512'; + /* istanbul ignore next */ + default: + return 'unknown'; + } + } + verify(preCert, key) { + // Assemble the digitally-signed struct (the data over which the signature + // was generated). + // https://www.rfc-editor.org/rfc/rfc6962#section-3.2 + const stream = new stream_1.ByteStream(); + stream.appendChar(this.version); + stream.appendChar(0x00); // SignatureType = certificate_timestamp(0) + stream.appendView(this.timestamp); + stream.appendUint16(0x01); // LogEntryType = precert_entry(1) + stream.appendView(preCert); + stream.appendUint16(this.extensions.byteLength); + /* istanbul ignore next - extensions are very uncommon */ + if (this.extensions.byteLength > 0) { + stream.appendView(this.extensions); + } + return crypto.verify(stream.buffer, key, this.signature, this.algorithm); + } + // Parses a SignedCertificateTimestamp from a buffer. SCTs are encoded using + // TLS encoding which means the fields and lengths of most fields are + // specified as part of the SCT and TLS specs. + // https://www.rfc-editor.org/rfc/rfc6962#section-3.2 + // https://www.rfc-editor.org/rfc/rfc5246#section-7.4.1.4.1 + static parse(buf) { + const stream = new stream_1.ByteStream(buf); + // Version - enum { v1(0), (255) } + const version = stream.getUint8(); + // Log ID - struct { opaque key_id[32]; } + const logID = stream.getBlock(32); + // Timestamp - uint64 + const timestamp = stream.getBlock(8); + // Extensions - opaque extensions<0..2^16-1>; + const extenstionLength = stream.getUint16(); + const extensions = stream.getBlock(extenstionLength); + // Hash algo - enum { sha256(4), . . . 
(255) } + const hashAlgorithm = stream.getUint8(); + // Signature algo - enum { anonymous(0), rsa(1), dsa(2), ecdsa(3), (255) } + const signatureAlgorithm = stream.getUint8(); + // Signature - opaque signature<0..2^16-1>; + const sigLength = stream.getUint16(); + const signature = stream.getBlock(sigLength); + // Check that we read the entire buffer + if (stream.position !== buf.length) { + throw new Error('SCT buffer length mismatch'); + } + return new SignedCertificateTimestamp({ + version, + logID, + timestamp, + extensions, + hashAlgorithm, + signatureAlgorithm, + signature, + }); + } +} +exports.SignedCertificateTimestamp = SignedCertificateTimestamp; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/package.json new file mode 100644 index 0000000000000..af5dd281ac90e --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/package.json @@ -0,0 +1,31 @@ +{ + "name": "@sigstore/core", + "version": "2.0.0", + "description": "Base library for Sigstore", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "clean": "shx rm -rf dist *.tsbuildinfo", + "build": "tsc --build", + "test": "jest" + }, + "files": [ + "dist" + ], + "author": "bdehamer@github.com", + "license": "Apache-2.0", + "repository": { + "type": "git", + "url": "git+https://github.com/sigstore/sigstore-js.git" + }, + "bugs": { + "url": "https://github.com/sigstore/sigstore-js/issues" + }, + "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/core#readme", + "publishConfig": { + "provenance": true + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/LICENSE new file mode 100644 index 0000000000000..e9e7c1679a09d --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 The Sigstore Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/bundler/base.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/bundler/base.js new file mode 100644 index 0000000000000..61d5eba4568a3 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/bundler/base.js @@ -0,0 +1,50 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.BaseBundleBuilder = void 0; +// BaseBundleBuilder is a base class for BundleBuilder implementations. It +// provides a the basic wokflow for signing and witnessing an artifact. +// Subclasses must implement the `package` method to assemble a valid bundle +// with the generated signature and verification material. +class BaseBundleBuilder { + constructor(options) { + this.signer = options.signer; + this.witnesses = options.witnesses; + } + // Executes the signing/witnessing process for the given artifact. 
+ async create(artifact) { + const signature = await this.prepare(artifact).then((blob) => this.signer.sign(blob)); + const bundle = await this.package(artifact, signature); + // Invoke all of the witnesses in parallel + const verificationMaterials = await Promise.all(this.witnesses.map((witness) => witness.testify(bundle.content, publicKey(signature.key)))); + // Collect the verification material from all of the witnesses + const tlogEntryList = []; + const timestampList = []; + verificationMaterials.forEach(({ tlogEntries, rfc3161Timestamps }) => { + tlogEntryList.push(...(tlogEntries ?? [])); + timestampList.push(...(rfc3161Timestamps ?? [])); + }); + // Merge the collected verification material into the bundle + bundle.verificationMaterial.tlogEntries = tlogEntryList; + bundle.verificationMaterial.timestampVerificationData = { + rfc3161Timestamps: timestampList, + }; + return bundle; + } + // Override this function to apply any pre-signing transformations to the + // artifact. The returned buffer will be signed by the signer. The default + // implementation simply returns the artifact data. + async prepare(artifact) { + return artifact.data; + } +} +exports.BaseBundleBuilder = BaseBundleBuilder; +// Extracts the public key from a KeyMaterial. Returns either the public key +// or the certificate, depending on the type of key material. +function publicKey(key) { + switch (key.$case) { + case 'publicKey': + return key.publicKey; + case 'x509Certificate': + return key.certificate; + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/bundler/bundle.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/bundler/bundle.js new file mode 100644 index 0000000000000..ed32286ad88ef --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/bundler/bundle.js @@ -0,0 +1,71 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toMessageSignatureBundle = toMessageSignatureBundle; +exports.toDSSEBundle = toDSSEBundle; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +*/ +const sigstore = __importStar(require("@sigstore/bundle")); +const util_1 = require("../util"); +// Helper functions for assembling the parts of a Sigstore bundle +// Message signature bundle - $case: 'messageSignature' +function toMessageSignatureBundle(artifact, signature) { + const digest = util_1.crypto.digest('sha256', artifact.data); + return sigstore.toMessageSignatureBundle({ + digest, + signature: signature.signature, + certificate: signature.key.$case === 'x509Certificate' + ? util_1.pem.toDER(signature.key.certificate) + : undefined, + keyHint: signature.key.$case === 'publicKey' ? signature.key.hint : undefined, + certificateChain: true, + }); +} +// DSSE envelope bundle - $case: 'dsseEnvelope' +function toDSSEBundle(artifact, signature, certificateChain) { + return sigstore.toDSSEBundle({ + artifact: artifact.data, + artifactType: artifact.type, + signature: signature.signature, + certificate: signature.key.$case === 'x509Certificate' + ? util_1.pem.toDER(signature.key.certificate) + : undefined, + keyHint: signature.key.$case === 'publicKey' ? signature.key.hint : undefined, + certificateChain, + }); +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/bundler/dsse.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/bundler/dsse.js new file mode 100644 index 0000000000000..86046ba8f3013 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/bundler/dsse.js @@ -0,0 +1,46 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DSSEBundleBuilder = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const util_1 = require("../util"); +const base_1 = require("./base"); +const bundle_1 = require("./bundle"); +// BundleBuilder implementation for DSSE wrapped attestations +class DSSEBundleBuilder extends base_1.BaseBundleBuilder { + constructor(options) { + super(options); + this.certificateChain = options.certificateChain ?? false; + } + // DSSE requires the artifact to be pre-encoded with the payload type + // before the signature is generated. + async prepare(artifact) { + const a = artifactDefaults(artifact); + return util_1.dsse.preAuthEncoding(a.type, a.data); + } + // Packages the artifact and signature into a DSSE bundle + async package(artifact, signature) { + return (0, bundle_1.toDSSEBundle)(artifactDefaults(artifact), signature, this.certificateChain); + } +} +exports.DSSEBundleBuilder = DSSEBundleBuilder; +// Defaults the artifact type to an empty string if not provided +function artifactDefaults(artifact) { + return { + ...artifact, + type: artifact.type ?? 
'', + }; +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/bundler/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/bundler/index.js new file mode 100644 index 0000000000000..d67c8c324a4f0 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/bundler/index.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.MessageSignatureBundleBuilder = exports.DSSEBundleBuilder = void 0; +var dsse_1 = require("./dsse"); +Object.defineProperty(exports, "DSSEBundleBuilder", { enumerable: true, get: function () { return dsse_1.DSSEBundleBuilder; } }); +var message_1 = require("./message"); +Object.defineProperty(exports, "MessageSignatureBundleBuilder", { enumerable: true, get: function () { return message_1.MessageSignatureBundleBuilder; } }); diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/bundler/message.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/bundler/message.js new file mode 100644 index 0000000000000..e3991f42bab93 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/bundler/message.js @@ -0,0 +1,30 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.MessageSignatureBundleBuilder = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const base_1 = require("./base"); +const bundle_1 = require("./bundle"); +// BundleBuilder implementation for raw message signatures +class MessageSignatureBundleBuilder extends base_1.BaseBundleBuilder { + constructor(options) { + super(options); + } + async package(artifact, signature) { + return (0, bundle_1.toMessageSignatureBundle)(artifact, signature); + } +} +exports.MessageSignatureBundleBuilder = MessageSignatureBundleBuilder; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/error.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/error.js new file mode 100644 index 0000000000000..d28f1913cc77e --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/error.js @@ -0,0 +1,39 @@ +"use strict"; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
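The bundle builders above are driven with a signer, a list of witnesses, and an artifact; DSSE artifacts additionally carry a payload type. A minimal usage sketch follows, assuming a signer and a witnesses array have already been constructed (FulcioSigner and RekorWitness, defined later in this package, are the usual choices); the payload and type are illustrative:

const { DSSEBundleBuilder } = require('@sigstore/sign');

// signer and witnesses are assumed to exist already.
const bundler = new DSSEBundleBuilder({ signer, witnesses });

bundler.create({
    data: Buffer.from(JSON.stringify({ illustrative: 'statement' })),
    type: 'application/vnd.in-toto+json', // defaults to '' when omitted (see artifactDefaults above)
}).then((bundle) => {
    // bundle.verificationMaterial now holds the merged tlog entries / timestamps
});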
+*/ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.InternalError = void 0; +exports.internalError = internalError; +const error_1 = require("./external/error"); +class InternalError extends Error { + constructor({ code, message, cause, }) { + super(message); + this.name = this.constructor.name; + this.cause = cause; + this.code = code; + } +} +exports.InternalError = InternalError; +function internalError(err, code, message) { + if (err instanceof error_1.HTTPError) { + message += ` - ${err.message}`; + } + throw new InternalError({ + code: code, + message: message, + cause: err, + }); +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/external/error.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/external/error.js new file mode 100644 index 0000000000000..a6a65adebb176 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/external/error.js @@ -0,0 +1,26 @@ +"use strict"; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.HTTPError = void 0; +class HTTPError extends Error { + constructor({ status, message, location, }) { + super(`(${status}) ${message}`); + this.statusCode = status; + this.location = location; + } +} +exports.HTTPError = HTTPError; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/external/fetch.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/external/fetch.js new file mode 100644 index 0000000000000..116090f3c641e --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/external/fetch.js @@ -0,0 +1,98 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fetchWithRetry = fetchWithRetry; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
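Every external failure in this package is funneled through internalError() above, which appends the HTTP message when the cause is an HTTPError and then throws an InternalError carrying a stable code. A sketch of what a caller can rely on (the bundler and artifact are assumed to exist and are illustrative):

const { InternalError } = require('@sigstore/sign');

async function signOrReport (bundler, artifact) {
    try {
        return await bundler.create(artifact);
    } catch (err) {
        if (err instanceof InternalError) {
            // e.g. 'TLOG_CREATE_ENTRY_ERROR'; err.cause is the original error
            console.error(`${err.code}: ${err.message}`);
        }
        throw err;
    }
}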
+*/ +const http2_1 = require("http2"); +const make_fetch_happen_1 = __importDefault(require("make-fetch-happen")); +const proc_log_1 = require("proc-log"); +const promise_retry_1 = __importDefault(require("promise-retry")); +const util_1 = require("../util"); +const error_1 = require("./error"); +const { HTTP2_HEADER_LOCATION, HTTP2_HEADER_CONTENT_TYPE, HTTP2_HEADER_USER_AGENT, HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_TOO_MANY_REQUESTS, HTTP_STATUS_REQUEST_TIMEOUT, } = http2_1.constants; +async function fetchWithRetry(url, options) { + return (0, promise_retry_1.default)(async (retry, attemptNum) => { + const method = options.method || 'POST'; + const headers = { + [HTTP2_HEADER_USER_AGENT]: util_1.ua.getUserAgent(), + ...options.headers, + }; + const response = await (0, make_fetch_happen_1.default)(url, { + method, + headers, + body: options.body, + timeout: options.timeout, + retry: false, // We're handling retries ourselves + }).catch((reason) => { + proc_log_1.log.http('fetch', `${method} ${url} attempt ${attemptNum} failed with ${reason}`); + return retry(reason); + }); + if (response.ok) { + return response; + } + else { + const error = await errorFromResponse(response); + proc_log_1.log.http('fetch', `${method} ${url} attempt ${attemptNum} failed with ${response.status}`); + if (retryable(response.status)) { + return retry(error); + } + else { + throw error; + } + } + }, retryOpts(options.retry)); +} +// Translate a Response into an HTTPError instance. This will attempt to parse +// the response body for a message, but will default to the statusText if none +// is found. +const errorFromResponse = async (response) => { + let message = response.statusText; + const location = response.headers.get(HTTP2_HEADER_LOCATION) || undefined; + const contentType = response.headers.get(HTTP2_HEADER_CONTENT_TYPE); + // If response type is JSON, try to parse the body for a message + if (contentType?.includes('application/json')) { + try { + const body = await response.json(); + message = body.message || message; + } + catch (e) { + // ignore + } + } + return new error_1.HTTPError({ + status: response.status, + message: message, + location: location, + }); +}; +// Determine if a status code is retryable. This includes 5xx errors, 408, and +// 429. +const retryable = (status) => [HTTP_STATUS_REQUEST_TIMEOUT, HTTP_STATUS_TOO_MANY_REQUESTS].includes(status) || status >= HTTP_STATUS_INTERNAL_SERVER_ERROR; +// Normalize the retry options to the format expected by promise-retry +const retryOpts = (retry) => { + if (typeof retry === 'boolean') { + return { retries: retry ? 1 : 0 }; + } + else if (typeof retry === 'number') { + return { retries: retry }; + } + else { + return { retries: 0, ...retry }; + } +}; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/external/fulcio.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/external/fulcio.js new file mode 100644 index 0000000000000..de6a1ad9f9e79 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/external/fulcio.js @@ -0,0 +1,41 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Fulcio = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
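fetchWithRetry above accepts the same retry option the rest of the package exposes, and retryOpts() normalizes it into promise-retry options: a boolean becomes one retry or none, a number is the retry count, and an object is passed through as-is. A sketch using the internal helper defined above, with a hypothetical endpoint and payload:

fetchWithRetry('https://example.sigstore.dev/api/v1/thing', {
    body: JSON.stringify({ hello: 'world' }),
    timeout: 5000,
    retry: 2, // equivalent to { retries: 2 }; true maps to { retries: 1 }; an object is passed straight through
}).then((response) => response.json());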
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const fetch_1 = require("./fetch"); +/** + * Fulcio API client. + */ +class Fulcio { + constructor(options) { + this.options = options; + } + async createSigningCertificate(request) { + const { baseURL, retry, timeout } = this.options; + const url = `${baseURL}/api/v2/signingCert`; + const response = await (0, fetch_1.fetchWithRetry)(url, { + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify(request), + timeout, + retry, + }); + return response.json(); + } +} +exports.Fulcio = Fulcio; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/external/rekor.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/external/rekor.js new file mode 100644 index 0000000000000..bb59a126e032f --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/external/rekor.js @@ -0,0 +1,80 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Rekor = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const fetch_1 = require("./fetch"); +/** + * Rekor API client. + */ +class Rekor { + constructor(options) { + this.options = options; + } + /** + * Create a new entry in the Rekor log. + * @param propsedEntry {ProposedEntry} Data to create a new entry + * @returns {Promise} The created entry + */ + async createEntry(propsedEntry) { + const { baseURL, timeout, retry } = this.options; + const url = `${baseURL}/api/v1/log/entries`; + const response = await (0, fetch_1.fetchWithRetry)(url, { + headers: { + 'Content-Type': 'application/json', + Accept: 'application/json', + }, + body: JSON.stringify(propsedEntry), + timeout, + retry, + }); + const data = await response.json(); + return entryFromResponse(data); + } + /** + * Get an entry from the Rekor log. + * @param uuid {string} The UUID of the entry to retrieve + * @returns {Promise} The retrieved entry + */ + async getEntry(uuid) { + const { baseURL, timeout, retry } = this.options; + const url = `${baseURL}/api/v1/log/entries/${uuid}`; + const response = await (0, fetch_1.fetchWithRetry)(url, { + method: 'GET', + headers: { + Accept: 'application/json', + }, + timeout, + retry, + }); + const data = await response.json(); + return entryFromResponse(data); + } +} +exports.Rekor = Rekor; +// Unpack the response from the Rekor API into a more convenient format. 
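Both API clients above follow the same pattern: construct with a base URL plus retry/timeout options, then call one method per endpoint. A sketch against the Rekor client (the proposed entry is assumed to come from toProposedEntry() in witness/tlog/entry.js, and the URL is the package default):

async function logEntry (proposedEntry) {
    const rekor = new Rekor({
        baseURL: 'https://rekor.sigstore.dev',
        retry: 2,
        timeout: 5000,
    });
    const entry = await rekor.createEntry(proposedEntry);
    // entries can also be looked up again by UUID
    return rekor.getEntry(entry.uuid);
}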
+function entryFromResponse(data) { + const entries = Object.entries(data); + if (entries.length != 1) { + throw new Error('Received multiple entries in Rekor response'); + } + // Grab UUID and entry data from the response + const [uuid, entry] = entries[0]; + return { + ...entry, + uuid, + }; +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/external/tsa.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/external/tsa.js new file mode 100644 index 0000000000000..a948ba9cca2c7 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/external/tsa.js @@ -0,0 +1,38 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TimestampAuthority = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const fetch_1 = require("./fetch"); +class TimestampAuthority { + constructor(options) { + this.options = options; + } + async createTimestamp(request) { + const { baseURL, timeout, retry } = this.options; + const url = `${baseURL}/api/v1/timestamp`; + const response = await (0, fetch_1.fetchWithRetry)(url, { + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify(request), + timeout, + retry, + }); + return response.buffer(); + } +} +exports.TimestampAuthority = TimestampAuthority; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/identity/ci.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/identity/ci.js new file mode 100644 index 0000000000000..d79133952b605 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/identity/ci.js @@ -0,0 +1,73 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.CIContextProvider = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const make_fetch_happen_1 = __importDefault(require("make-fetch-happen")); +// Collection of all the CI-specific providers we have implemented +const providers = [getGHAToken, getEnv]; +/** + * CIContextProvider is a composite identity provider which will iterate + * over all of the CI-specific providers and return the token from the first + * one that resolves. 
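entryFromResponse() above flattens the single-key object Rekor returns into an entry that carries its own UUID. Roughly, with an abbreviated, made-up response:

// Rekor responds with an object keyed by the entry UUID (values abbreviated):
const data = {
    '24296fb24b8ad77a': { logIndex: 123, logID: 'c0d23d6a...', integratedTime: 1700000000, body: '...' },
};
const entry = entryFromResponse(data);
// → { logIndex: 123, logID: 'c0d23d6a...', integratedTime: 1700000000, body: '...', uuid: '24296fb24b8ad77a' }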
+ */ +class CIContextProvider { + /* istanbul ignore next */ + constructor(audience = 'sigstore') { + this.audience = audience; + } + // Invoke all registered ProviderFuncs and return the value of whichever one + // resolves first. + async getToken() { + return Promise.any(providers.map((getToken) => getToken(this.audience))).catch(() => Promise.reject('CI: no tokens available')); + } +} +exports.CIContextProvider = CIContextProvider; +/** + * getGHAToken can retrieve an OIDC token when running in a GitHub Actions + * workflow + */ +async function getGHAToken(audience) { + // Check to see if we're running in GitHub Actions + if (!process.env.ACTIONS_ID_TOKEN_REQUEST_URL || + !process.env.ACTIONS_ID_TOKEN_REQUEST_TOKEN) { + return Promise.reject('no token available'); + } + // Construct URL to request token w/ appropriate audience + const url = new URL(process.env.ACTIONS_ID_TOKEN_REQUEST_URL); + url.searchParams.append('audience', audience); + const response = await (0, make_fetch_happen_1.default)(url.href, { + retry: 2, + headers: { + Accept: 'application/json', + Authorization: `Bearer ${process.env.ACTIONS_ID_TOKEN_REQUEST_TOKEN}`, + }, + }); + return response.json().then((data) => data.value); +} +/** + * getEnv can retrieve an OIDC token from an environment variable. + * This matches the behavior of https://github.com/sigstore/cosign/tree/main/pkg/providers/envvar + */ +async function getEnv() { + if (!process.env.SIGSTORE_ID_TOKEN) { + return Promise.reject('no token available'); + } + return process.env.SIGSTORE_ID_TOKEN; +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/identity/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/identity/index.js new file mode 100644 index 0000000000000..1c1223b443fab --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/identity/index.js @@ -0,0 +1,20 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.CIContextProvider = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
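In practice the provider above resolves a token from GitHub Actions when the ACTIONS_ID_TOKEN_REQUEST_* variables are present, falls back to the SIGSTORE_ID_TOKEN environment variable, and rejects otherwise. A minimal sketch of consuming it:

const { CIContextProvider } = require('@sigstore/sign');

const idp = new CIContextProvider('sigstore');
idp.getToken()
    .then((token) => { /* hand the OIDC token to FulcioSigner via its identityProvider option */ })
    .catch(() => { /* rejects with 'CI: no tokens available' outside a supported CI context */ });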
+*/ +var ci_1 = require("./ci"); +Object.defineProperty(exports, "CIContextProvider", { enumerable: true, get: function () { return ci_1.CIContextProvider; } }); diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/identity/provider.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/identity/provider.js new file mode 100644 index 0000000000000..c8ad2e549bdc6 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/identity/provider.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/index.js new file mode 100644 index 0000000000000..383b76083361b --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/index.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TSAWitness = exports.RekorWitness = exports.DEFAULT_REKOR_URL = exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = exports.CIContextProvider = exports.InternalError = exports.MessageSignatureBundleBuilder = exports.DSSEBundleBuilder = void 0; +var bundler_1 = require("./bundler"); +Object.defineProperty(exports, "DSSEBundleBuilder", { enumerable: true, get: function () { return bundler_1.DSSEBundleBuilder; } }); +Object.defineProperty(exports, "MessageSignatureBundleBuilder", { enumerable: true, get: function () { return bundler_1.MessageSignatureBundleBuilder; } }); +var error_1 = require("./error"); +Object.defineProperty(exports, "InternalError", { enumerable: true, get: function () { return error_1.InternalError; } }); +var identity_1 = require("./identity"); +Object.defineProperty(exports, "CIContextProvider", { enumerable: true, get: function () { return identity_1.CIContextProvider; } }); +var signer_1 = require("./signer"); +Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return signer_1.DEFAULT_FULCIO_URL; } }); +Object.defineProperty(exports, "FulcioSigner", { enumerable: true, get: function () { return signer_1.FulcioSigner; } }); +var witness_1 = require("./witness"); +Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return witness_1.DEFAULT_REKOR_URL; } }); +Object.defineProperty(exports, "RekorWitness", { enumerable: true, get: function () { return witness_1.RekorWitness; } }); +Object.defineProperty(exports, "TSAWitness", { enumerable: true, get: function () { return witness_1.TSAWitness; } }); diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js new file mode 100644 index 0000000000000..f01703cfab564 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js @@ -0,0 +1,59 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.CAClient = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
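The entry point above is the whole public surface of the package; wiring it together end to end looks roughly like the following sketch (the attestation payload is illustrative, and the default URLs point at the public Sigstore services):

const {
    CIContextProvider,
    DSSEBundleBuilder,
    FulcioSigner,
    RekorWitness,
    DEFAULT_FULCIO_URL,
    DEFAULT_REKOR_URL,
} = require('@sigstore/sign');

const signer = new FulcioSigner({
    fulcioBaseURL: DEFAULT_FULCIO_URL,
    identityProvider: new CIContextProvider('sigstore'),
});
const witnesses = [new RekorWitness({ rekorBaseURL: DEFAULT_REKOR_URL })];

new DSSEBundleBuilder({ signer, witnesses })
    .create({ data: Buffer.from('illustrative attestation payload') })
    .then((bundle) => { /* bundle can then be serialized via @sigstore/bundle */ });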
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const error_1 = require("../../error"); +const fulcio_1 = require("../../external/fulcio"); +class CAClient { + constructor(options) { + this.fulcio = new fulcio_1.Fulcio({ + baseURL: options.fulcioBaseURL, + retry: options.retry, + timeout: options.timeout, + }); + } + async createSigningCertificate(identityToken, publicKey, challenge) { + const request = toCertificateRequest(identityToken, publicKey, challenge); + try { + const resp = await this.fulcio.createSigningCertificate(request); + // Account for the fact that the response may contain either a + // signedCertificateEmbeddedSct or a signedCertificateDetachedSct. + const cert = resp.signedCertificateEmbeddedSct + ? resp.signedCertificateEmbeddedSct + : resp.signedCertificateDetachedSct; + return cert.chain.certificates; + } + catch (err) { + (0, error_1.internalError)(err, 'CA_CREATE_SIGNING_CERTIFICATE_ERROR', 'error creating signing certificate'); + } + } +} +exports.CAClient = CAClient; +function toCertificateRequest(identityToken, publicKey, challenge) { + return { + credentials: { + oidcIdentityToken: identityToken, + }, + publicKeyRequest: { + publicKey: { + algorithm: 'ECDSA', + content: publicKey, + }, + proofOfPossession: challenge.toString('base64'), + }, + }; +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js new file mode 100644 index 0000000000000..481aa5c3579a2 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js @@ -0,0 +1,45 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.EphemeralSigner = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const crypto_1 = __importDefault(require("crypto")); +const EC_KEYPAIR_TYPE = 'ec'; +const P256_CURVE = 'P-256'; +// Signer implementation which uses an ephemeral keypair to sign artifacts. +// The private key lives only in memory and is tied to the lifetime of the +// EphemeralSigner instance. 
+class EphemeralSigner { + constructor() { + this.keypair = crypto_1.default.generateKeyPairSync(EC_KEYPAIR_TYPE, { + namedCurve: P256_CURVE, + }); + } + async sign(data) { + const signature = crypto_1.default.sign(null, data, this.keypair.privateKey); + const publicKey = this.keypair.publicKey + .export({ format: 'pem', type: 'spki' }) + .toString('ascii'); + return { + signature: signature, + key: { $case: 'publicKey', publicKey }, + }; + } +} +exports.EphemeralSigner = EphemeralSigner; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/signer/fulcio/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/signer/fulcio/index.js new file mode 100644 index 0000000000000..89a432548d2b4 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/signer/fulcio/index.js @@ -0,0 +1,87 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const error_1 = require("../../error"); +const util_1 = require("../../util"); +const ca_1 = require("./ca"); +const ephemeral_1 = require("./ephemeral"); +exports.DEFAULT_FULCIO_URL = 'https://fulcio.sigstore.dev'; +// Signer implementation which can be used to decorate another signer +// with a Fulcio-issued signing certificate for the signer's public key. +// Must be instantiated with an identity provider which can provide a JWT +// which represents the identity to be bound to the signing certificate. +class FulcioSigner { + constructor(options) { + this.ca = new ca_1.CAClient({ + ...options, + fulcioBaseURL: options.fulcioBaseURL || /* istanbul ignore next */ exports.DEFAULT_FULCIO_URL, + }); + this.identityProvider = options.identityProvider; + this.keyHolder = options.keyHolder || new ephemeral_1.EphemeralSigner(); + } + async sign(data) { + // Retrieve identity token from the supplied identity provider + const identityToken = await this.getIdentityToken(); + // Extract challenge claim from OIDC token + let subject; + try { + subject = util_1.oidc.extractJWTSubject(identityToken); + } + catch (err) { + throw new error_1.InternalError({ + code: 'IDENTITY_TOKEN_PARSE_ERROR', + message: `invalid identity token: ${identityToken}`, + cause: err, + }); + } + // Construct challenge value by signing the subject claim + const challenge = await this.keyHolder.sign(Buffer.from(subject)); + if (challenge.key.$case !== 'publicKey') { + throw new error_1.InternalError({ + code: 'CA_CREATE_SIGNING_CERTIFICATE_ERROR', + message: 'unexpected format for signing key', + }); + } + // Create signing certificate + const certificates = await this.ca.createSigningCertificate(identityToken, challenge.key.publicKey, challenge.signature); + // Generate artifact signature + const signature = await this.keyHolder.sign(data); + // Specifically returning only the first certificate in the chain + // as the key. 
+ return { + signature: signature.signature, + key: { + $case: 'x509Certificate', + certificate: certificates[0], + }, + }; + } + async getIdentityToken() { + try { + return await this.identityProvider.getToken(); + } + catch (err) { + throw new error_1.InternalError({ + code: 'IDENTITY_TOKEN_READ_ERROR', + message: 'error retrieving identity token', + cause: err, + }); + } + } +} +exports.FulcioSigner = FulcioSigner; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/signer/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/signer/index.js new file mode 100644 index 0000000000000..e2087767b81c1 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/signer/index.js @@ -0,0 +1,22 @@ +"use strict"; +/* istanbul ignore file */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +var fulcio_1 = require("./fulcio"); +Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return fulcio_1.DEFAULT_FULCIO_URL; } }); +Object.defineProperty(exports, "FulcioSigner", { enumerable: true, get: function () { return fulcio_1.FulcioSigner; } }); diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/signer/signer.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/signer/signer.js new file mode 100644 index 0000000000000..b92c54183375d --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/signer/signer.js @@ -0,0 +1,17 @@ +"use strict"; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/types/fetch.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/types/fetch.js new file mode 100644 index 0000000000000..c8ad2e549bdc6 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/types/fetch.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/util/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/util/index.js new file mode 100644 index 0000000000000..f467c9150c348 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/util/index.js @@ -0,0 +1,49 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ua = exports.oidc = exports.pem = exports.json = exports.encoding = exports.dsse = exports.crypto = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +var core_1 = require("@sigstore/core"); +Object.defineProperty(exports, "crypto", { enumerable: true, get: function () { return core_1.crypto; } }); +Object.defineProperty(exports, "dsse", { enumerable: true, get: function () { return core_1.dsse; } }); +Object.defineProperty(exports, "encoding", { enumerable: true, get: function () { return core_1.encoding; } }); +Object.defineProperty(exports, "json", { enumerable: true, get: function () { return core_1.json; } }); +Object.defineProperty(exports, "pem", { enumerable: true, get: function () { return core_1.pem; } }); +exports.oidc = __importStar(require("./oidc")); +exports.ua = __importStar(require("./ua")); diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/util/oidc.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/util/oidc.js new file mode 100644 index 0000000000000..37c5b168ee12e --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/util/oidc.js @@ -0,0 +1,30 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.extractJWTSubject = extractJWTSubject; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const core_1 = require("@sigstore/core"); +function extractJWTSubject(jwt) { + const parts = jwt.split('.', 3); + const payload = JSON.parse(core_1.encoding.base64Decode(parts[1])); + switch (payload.iss) { + case 'https://accounts.google.com': + case 'https://oauth2.sigstore.dev/auth': + return payload.email; + default: + return payload.sub; + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/util/ua.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/util/ua.js new file mode 100644 index 0000000000000..b15ff2070fb9f --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/util/ua.js @@ -0,0 +1,32 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getUserAgent = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
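extractJWTSubject() above keys off the iss claim: the Google and sigstore OAuth issuers use the email claim, and everything else falls back to sub. A sketch with a hand-assembled, unsigned token (issuer and subject values are made up; only the payload segment matters to this helper):

const { encoding } = require('@sigstore/core');

const payload = { iss: 'https://token.actions.githubusercontent.com', sub: 'repo:owner/repo:ref:refs/heads/main' };
const jwt = ['e30', encoding.base64Encode(JSON.stringify(payload)), ''].join('.'); // header.payload.signature
extractJWTSubject(jwt); // → the sub claim, since iss is not one of the email-based issuers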
+*/ +const os_1 = __importDefault(require("os")); +// Format User-Agent: / () +// source: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/User-Agent +const getUserAgent = () => { + const packageVersion = require('../../package.json').version; + const nodeVersion = process.version; + const platformName = os_1.default.platform(); + const archName = os_1.default.arch(); + return `sigstore-js/${packageVersion} (Node ${nodeVersion}) (${platformName}/${archName})`; +}; +exports.getUserAgent = getUserAgent; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/index.js new file mode 100644 index 0000000000000..72677c399caa7 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/index.js @@ -0,0 +1,24 @@ +"use strict"; +/* istanbul ignore file */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TSAWitness = exports.RekorWitness = exports.DEFAULT_REKOR_URL = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +var tlog_1 = require("./tlog"); +Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return tlog_1.DEFAULT_REKOR_URL; } }); +Object.defineProperty(exports, "RekorWitness", { enumerable: true, get: function () { return tlog_1.RekorWitness; } }); +var tsa_1 = require("./tsa"); +Object.defineProperty(exports, "TSAWitness", { enumerable: true, get: function () { return tsa_1.TSAWitness; } }); diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/tlog/client.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/tlog/client.js new file mode 100644 index 0000000000000..22c895f2ca7ed --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/tlog/client.js @@ -0,0 +1,61 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TLogClient = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const error_1 = require("../../error"); +const error_2 = require("../../external/error"); +const rekor_1 = require("../../external/rekor"); +class TLogClient { + constructor(options) { + this.fetchOnConflict = options.fetchOnConflict ?? 
false; + this.rekor = new rekor_1.Rekor({ + baseURL: options.rekorBaseURL, + retry: options.retry, + timeout: options.timeout, + }); + } + async createEntry(proposedEntry) { + let entry; + try { + entry = await this.rekor.createEntry(proposedEntry); + } + catch (err) { + // If the entry already exists, fetch it (if enabled) + if (entryExistsError(err) && this.fetchOnConflict) { + // Grab the UUID of the existing entry from the location header + /* istanbul ignore next */ + const uuid = err.location.split('/').pop() || ''; + try { + entry = await this.rekor.getEntry(uuid); + } + catch (err) { + (0, error_1.internalError)(err, 'TLOG_FETCH_ENTRY_ERROR', 'error fetching tlog entry'); + } + } + else { + (0, error_1.internalError)(err, 'TLOG_CREATE_ENTRY_ERROR', 'error creating tlog entry'); + } + } + return entry; + } +} +exports.TLogClient = TLogClient; +function entryExistsError(value) { + return (value instanceof error_2.HTTPError && + value.statusCode === 409 && + value.location !== undefined); +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/tlog/entry.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/tlog/entry.js new file mode 100644 index 0000000000000..bb1c68e914b90 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/tlog/entry.js @@ -0,0 +1,140 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toProposedEntry = toProposedEntry; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
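With fetchOnConflict enabled, an HTTP 409 from Rekor (the entry already exists) is not fatal: the client above follows the Location header and returns the pre-existing entry instead of throwing. A sketch of that option, assuming a proposed entry produced by toProposedEntry() in ./entry.js and the default Rekor URL:

async function ensureEntry (proposedEntry) {
    const tlog = new TLogClient({
        rekorBaseURL: 'https://rekor.sigstore.dev',
        fetchOnConflict: true, // on 409, fetch and return the existing entry
        retry: 2,
    });
    return tlog.createEntry(proposedEntry);
}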
+*/ +const bundle_1 = require("@sigstore/bundle"); +const util_1 = require("../../util"); +const SHA256_ALGORITHM = 'sha256'; +function toProposedEntry(content, publicKey, +// TODO: Remove this parameter once have completely switched to 'dsse' entries +entryType = 'dsse') { + switch (content.$case) { + case 'dsseEnvelope': + // TODO: Remove this conditional once have completely ditched "intoto" entries + if (entryType === 'intoto') { + return toProposedIntotoEntry(content.dsseEnvelope, publicKey); + } + return toProposedDSSEEntry(content.dsseEnvelope, publicKey); + case 'messageSignature': + return toProposedHashedRekordEntry(content.messageSignature, publicKey); + } +} +// Returns a properly formatted Rekor "hashedrekord" entry for the given digest +// and signature +function toProposedHashedRekordEntry(messageSignature, publicKey) { + const hexDigest = messageSignature.messageDigest.digest.toString('hex'); + const b64Signature = messageSignature.signature.toString('base64'); + const b64Key = util_1.encoding.base64Encode(publicKey); + return { + apiVersion: '0.0.1', + kind: 'hashedrekord', + spec: { + data: { + hash: { + algorithm: SHA256_ALGORITHM, + value: hexDigest, + }, + }, + signature: { + content: b64Signature, + publicKey: { + content: b64Key, + }, + }, + }, + }; +} +// Returns a properly formatted Rekor "dsse" entry for the given DSSE envelope +// and signature +function toProposedDSSEEntry(envelope, publicKey) { + const envelopeJSON = JSON.stringify((0, bundle_1.envelopeToJSON)(envelope)); + const encodedKey = util_1.encoding.base64Encode(publicKey); + return { + apiVersion: '0.0.1', + kind: 'dsse', + spec: { + proposedContent: { + envelope: envelopeJSON, + verifiers: [encodedKey], + }, + }, + }; +} +// Returns a properly formatted Rekor "intoto" entry for the given DSSE +// envelope and signature +function toProposedIntotoEntry(envelope, publicKey) { + // Calculate the value for the payloadHash field in the Rekor entry + const payloadHash = util_1.crypto + .digest(SHA256_ALGORITHM, envelope.payload) + .toString('hex'); + // Calculate the value for the hash field in the Rekor entry + const envelopeHash = calculateDSSEHash(envelope, publicKey); + // Collect values for re-creating the DSSE envelope. + // Double-encode payload and signature cause that's what Rekor expects + const payload = util_1.encoding.base64Encode(envelope.payload.toString('base64')); + const sig = util_1.encoding.base64Encode(envelope.signatures[0].sig.toString('base64')); + const keyid = envelope.signatures[0].keyid; + const encodedKey = util_1.encoding.base64Encode(publicKey); + // Create the envelope portion of the entry. Note the inclusion of the + // publicKey in the signature struct is not a standard part of a DSSE + // envelope, but is required by Rekor. + const dsse = { + payloadType: envelope.payloadType, + payload: payload, + signatures: [{ sig, publicKey: encodedKey }], + }; + // If the keyid is an empty string, Rekor seems to remove it altogether. We + // need to do the same here so that we can properly recreate the entry for + // verification. + if (keyid.length > 0) { + dsse.signatures[0].keyid = keyid; + } + return { + apiVersion: '0.0.2', + kind: 'intoto', + spec: { + content: { + envelope: dsse, + hash: { algorithm: SHA256_ALGORITHM, value: envelopeHash }, + payloadHash: { algorithm: SHA256_ALGORITHM, value: payloadHash }, + }, + }, + }; +} +// Calculates the hash of a DSSE envelope for inclusion in a Rekor entry. 
+// There is no standard way to do this, so the scheme we're using as as +// follows: +// * payload is base64 encoded +// * signature is base64 encoded (only the first signature is used) +// * keyid is included ONLY if it is NOT an empty string +// * The resulting JSON is canonicalized and hashed to a hex string +function calculateDSSEHash(envelope, publicKey) { + const dsse = { + payloadType: envelope.payloadType, + payload: envelope.payload.toString('base64'), + signatures: [ + { sig: envelope.signatures[0].sig.toString('base64'), publicKey }, + ], + }; + // If the keyid is an empty string, Rekor seems to remove it altogether. + if (envelope.signatures[0].keyid.length > 0) { + dsse.signatures[0].keyid = envelope.signatures[0].keyid; + } + return util_1.crypto + .digest(SHA256_ALGORITHM, util_1.json.canonicalize(dsse)) + .toString('hex'); +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/tlog/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/tlog/index.js new file mode 100644 index 0000000000000..6197b09d4cdd9 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/tlog/index.js @@ -0,0 +1,82 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.RekorWitness = exports.DEFAULT_REKOR_URL = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const util_1 = require("../../util"); +const client_1 = require("./client"); +const entry_1 = require("./entry"); +exports.DEFAULT_REKOR_URL = 'https://rekor.sigstore.dev'; +class RekorWitness { + constructor(options) { + this.entryType = options.entryType; + this.tlog = new client_1.TLogClient({ + ...options, + rekorBaseURL: options.rekorBaseURL || /* istanbul ignore next */ exports.DEFAULT_REKOR_URL, + }); + } + async testify(content, publicKey) { + const proposedEntry = (0, entry_1.toProposedEntry)(content, publicKey, this.entryType); + const entry = await this.tlog.createEntry(proposedEntry); + return toTransparencyLogEntry(entry); + } +} +exports.RekorWitness = RekorWitness; +function toTransparencyLogEntry(entry) { + const logID = Buffer.from(entry.logID, 'hex'); + // Parse entry body so we can extract the kind and version. + const bodyJSON = util_1.encoding.base64Decode(entry.body); + const entryBody = JSON.parse(bodyJSON); + const promise = entry?.verification?.signedEntryTimestamp + ? inclusionPromise(entry.verification.signedEntryTimestamp) + : undefined; + const proof = entry?.verification?.inclusionProof + ? 
inclusionProof(entry.verification.inclusionProof) + : undefined; + const tlogEntry = { + logIndex: entry.logIndex.toString(), + logId: { + keyId: logID, + }, + integratedTime: entry.integratedTime.toString(), + kindVersion: { + kind: entryBody.kind, + version: entryBody.apiVersion, + }, + inclusionPromise: promise, + inclusionProof: proof, + canonicalizedBody: Buffer.from(entry.body, 'base64'), + }; + return { + tlogEntries: [tlogEntry], + }; +} +function inclusionPromise(promise) { + return { + signedEntryTimestamp: Buffer.from(promise, 'base64'), + }; +} +function inclusionProof(proof) { + return { + logIndex: proof.logIndex.toString(), + treeSize: proof.treeSize.toString(), + rootHash: Buffer.from(proof.rootHash, 'hex'), + hashes: proof.hashes.map((h) => Buffer.from(h, 'hex')), + checkpoint: { + envelope: proof.checkpoint, + }, + }; +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/tsa/client.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/tsa/client.js new file mode 100644 index 0000000000000..754de3748dbb3 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/tsa/client.js @@ -0,0 +1,46 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TSAClient = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const error_1 = require("../../error"); +const tsa_1 = require("../../external/tsa"); +const util_1 = require("../../util"); +const SHA256_ALGORITHM = 'sha256'; +class TSAClient { + constructor(options) { + this.tsa = new tsa_1.TimestampAuthority({ + baseURL: options.tsaBaseURL, + retry: options.retry, + timeout: options.timeout, + }); + } + async createTimestamp(signature) { + const request = { + artifactHash: util_1.crypto + .digest(SHA256_ALGORITHM, signature) + .toString('base64'), + hashAlgorithm: SHA256_ALGORITHM, + }; + try { + return await this.tsa.createTimestamp(request); + } + catch (err) { + (0, error_1.internalError)(err, 'TSA_CREATE_TIMESTAMP_ERROR', 'error creating timestamp'); + } + } +} +exports.TSAClient = TSAClient; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/tsa/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/tsa/index.js new file mode 100644 index 0000000000000..d4f5c7c859d10 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/tsa/index.js @@ -0,0 +1,44 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TSAWitness = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
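The TSA witness defined in witness/tsa/index.js below mirrors the Rekor witness but contributes RFC 3161 timestamps rather than transparency log entries; both kinds can be handed to the bundler side by side. A sketch (the TSA URL is a placeholder, since there is no default timestamp authority):

const { RekorWitness, TSAWitness } = require('@sigstore/sign');

const witnesses = [
    new RekorWitness({ rekorBaseURL: 'https://rekor.sigstore.dev' }),
    new TSAWitness({ tsaBaseURL: 'https://tsa.example.com' }), // placeholder URL
];
// Each witness's testify() result is merged into bundle.verificationMaterial
// by BaseBundleBuilder.create().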
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const client_1 = require("./client"); +class TSAWitness { + constructor(options) { + this.tsa = new client_1.TSAClient({ + tsaBaseURL: options.tsaBaseURL, + retry: options.retry, + timeout: options.timeout, + }); + } + async testify(content) { + const signature = extractSignature(content); + const timestamp = await this.tsa.createTimestamp(signature); + return { + rfc3161Timestamps: [{ signedTimestamp: timestamp }], + }; + } +} +exports.TSAWitness = TSAWitness; +function extractSignature(content) { + switch (content.$case) { + case 'dsseEnvelope': + return content.dsseEnvelope.signatures[0].sig; + case 'messageSignature': + return content.messageSignature.signature; + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/witness.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/witness.js new file mode 100644 index 0000000000000..c8ad2e549bdc6 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/witness.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/package.json new file mode 100644 index 0000000000000..fe05e8dc2d73a --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/package.json @@ -0,0 +1,46 @@ +{ + "name": "@sigstore/sign", + "version": "3.0.0", + "description": "Sigstore signing library", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "clean": "shx rm -rf dist *.tsbuildinfo", + "build": "tsc --build", + "test": "jest" + }, + "files": [ + "dist" + ], + "author": "bdehamer@github.com", + "license": "Apache-2.0", + "repository": { + "type": "git", + "url": "git+https://github.com/sigstore/sigstore-js.git" + }, + "bugs": { + "url": "https://github.com/sigstore/sigstore-js/issues" + }, + "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/sign#readme", + "publishConfig": { + "provenance": true + }, + "devDependencies": { + "@sigstore/jest": "^0.0.0", + "@sigstore/mock": "^0.8.0", + "@sigstore/rekor-types": "^3.0.0", + "@types/make-fetch-happen": "^10.0.4", + "@types/promise-retry": "^1.1.6" + }, + "dependencies": { + "@sigstore/bundle": "^3.0.0", + "@sigstore/core": "^2.0.0", + "@sigstore/protobuf-specs": "^0.3.2", + "make-fetch-happen": "^14.0.1", + "proc-log": "^5.0.0", + "promise-retry": "^2.0.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/LICENSE new file mode 100644 index 0000000000000..e9e7c1679a09d --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 The Sigstore Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
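As a rough illustration of the DSSE entry hashing scheme described above for calculateDSSEHash (payload and signature base64-encoded, keyid included only when non-empty, result canonicalized and hashed to hex), the standalone sketch below shows the same shape using only Node's built-in crypto. The sortKeys helper here is a naive stand-in for the library's canonicalization (util_1.json.canonicalize), so the output is not guaranteed to match what Rekor computes byte for byte; it is illustrative only and not part of the vendored sources in this patch.

// Illustrative sketch only -- not part of the vendored files above.
// Assumes Node's built-in crypto; sortKeys is a naive stand-in for the
// library's JSON canonicalization helper.
const crypto = require('crypto');

const sortKeys = (value) =>
  value && typeof value === 'object' && !Array.isArray(value)
    ? Object.fromEntries(Object.keys(value).sort().map((k) => [k, sortKeys(value[k])]))
    : Array.isArray(value)
      ? value.map(sortKeys)
      : value;

function sketchDSSEHash(envelope, publicKey) {
  const dsse = {
    payloadType: envelope.payloadType,
    payload: envelope.payload.toString('base64'),
    signatures: [
      { sig: envelope.signatures[0].sig.toString('base64'), publicKey },
    ],
  };
  // keyid is included only when it is a non-empty string
  if (envelope.signatures[0].keyid.length > 0) {
    dsse.signatures[0].keyid = envelope.signatures[0].keyid;
  }
  return crypto
    .createHash('sha256')
    .update(JSON.stringify(sortKeys(dsse)))
    .digest('hex');
}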
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/appdata.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/appdata.js new file mode 100644 index 0000000000000..06a8143e70da2 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/appdata.js @@ -0,0 +1,43 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.appDataPath = appDataPath; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const os_1 = __importDefault(require("os")); +const path_1 = __importDefault(require("path")); +function appDataPath(name) { + const homedir = os_1.default.homedir(); + switch (process.platform) { + /* istanbul ignore next */ + case 'darwin': { + const appSupport = path_1.default.join(homedir, 'Library', 'Application Support'); + return path_1.default.join(appSupport, name); + } + /* istanbul ignore next */ + case 'win32': { + const localAppData = process.env.LOCALAPPDATA || path_1.default.join(homedir, 'AppData', 'Local'); + return path_1.default.join(localAppData, name, 'Data'); + } + /* istanbul ignore next */ + default: { + const localData = process.env.XDG_DATA_HOME || path_1.default.join(homedir, '.local', 'share'); + return path_1.default.join(localData, name); + } + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/client.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/client.js new file mode 100644 index 0000000000000..328f49e40dbbd --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/client.js @@ -0,0 +1,111 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TUFClient = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const fs_1 = __importDefault(require("fs")); +const path_1 = __importDefault(require("path")); +const tuf_js_1 = require("tuf-js"); +const _1 = require("."); +const target_1 = require("./target"); +const TARGETS_DIR_NAME = 'targets'; +class TUFClient { + constructor(options) { + const url = new URL(options.mirrorURL); + const repoName = encodeURIComponent(url.host + url.pathname.replace(/\/$/, '')); + const cachePath = path_1.default.join(options.cachePath, repoName); + initTufCache(cachePath); + seedCache({ + cachePath, + mirrorURL: options.mirrorURL, + tufRootPath: options.rootPath, + forceInit: options.forceInit, + }); + this.updater = initClient({ + mirrorURL: options.mirrorURL, + cachePath, + forceCache: options.forceCache, + retry: options.retry, + timeout: options.timeout, + }); + } + async refresh() { + return this.updater.refresh(); + } + getTarget(targetName) { + return (0, target_1.readTarget)(this.updater, targetName); + } +} +exports.TUFClient = TUFClient; +// Initializes the TUF cache directory structure including the initial +// root.json file. If the cache directory does not exist, it will be +// created. If the targets directory does not exist, it will be created. +// If the root.json file does not exist, it will be copied from the +// rootPath argument. +function initTufCache(cachePath) { + const targetsPath = path_1.default.join(cachePath, TARGETS_DIR_NAME); + if (!fs_1.default.existsSync(cachePath)) { + fs_1.default.mkdirSync(cachePath, { recursive: true }); + } + if (!fs_1.default.existsSync(targetsPath)) { + fs_1.default.mkdirSync(targetsPath); + } +} +// Populates the TUF cache with the initial root.json file. If the root.json +// file does not exist (or we're forcing re-initialization), copy it from either +// the rootPath argument or from one of the repo seeds. +function seedCache({ cachePath, mirrorURL, tufRootPath, forceInit, }) { + const cachedRootPath = path_1.default.join(cachePath, 'root.json'); + // If the root.json file does not exist (or we're forcing re-initialization), + // populate it either from the supplied rootPath or from one of the repo seeds. 
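+// The bundled seeds.json maps a mirror URL to an object containing a
+// base64-encoded 'root.json' plus a 'targets' map of base64-encoded target
+// files; when no seed exists for the requested mirror and no rootPath was
+// supplied, initialization fails with a TUF_INIT_CACHE_ERROR.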
+ if (!fs_1.default.existsSync(cachedRootPath) || forceInit) { + if (tufRootPath) { + fs_1.default.copyFileSync(tufRootPath, cachedRootPath); + } + else { + const seeds = require('../seeds.json'); + const repoSeed = seeds[mirrorURL]; + if (!repoSeed) { + throw new _1.TUFError({ + code: 'TUF_INIT_CACHE_ERROR', + message: `No root.json found for mirror: ${mirrorURL}`, + }); + } + fs_1.default.writeFileSync(cachedRootPath, Buffer.from(repoSeed['root.json'], 'base64')); + // Copy any seed targets into the cache + Object.entries(repoSeed.targets).forEach(([targetName, target]) => { + fs_1.default.writeFileSync(path_1.default.join(cachePath, TARGETS_DIR_NAME, targetName), Buffer.from(target, 'base64')); + }); + } + } +} +function initClient(options) { + const config = { + fetchTimeout: options.timeout, + fetchRetry: options.retry, + }; + return new tuf_js_1.Updater({ + metadataBaseUrl: options.mirrorURL, + targetBaseUrl: `${options.mirrorURL}/targets`, + metadataDir: options.cachePath, + targetDir: path_1.default.join(options.cachePath, TARGETS_DIR_NAME), + forceCache: options.forceCache, + config, + }); +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/error.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/error.js new file mode 100644 index 0000000000000..e13971b289ff2 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/error.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TUFError = void 0; +class TUFError extends Error { + constructor({ code, message, cause, }) { + super(message); + this.code = code; + this.cause = cause; + this.name = this.constructor.name; + } +} +exports.TUFError = TUFError; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/index.js new file mode 100644 index 0000000000000..2af5de93ec5d2 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/index.js @@ -0,0 +1,56 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TUFError = exports.DEFAULT_MIRROR_URL = void 0; +exports.getTrustedRoot = getTrustedRoot; +exports.initTUF = initTUF; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const protobuf_specs_1 = require("@sigstore/protobuf-specs"); +const appdata_1 = require("./appdata"); +const client_1 = require("./client"); +exports.DEFAULT_MIRROR_URL = 'https://tuf-repo-cdn.sigstore.dev'; +const DEFAULT_CACHE_DIR = 'sigstore-js'; +const DEFAULT_RETRY = { retries: 2 }; +const DEFAULT_TIMEOUT = 5000; +const TRUSTED_ROOT_TARGET = 'trusted_root.json'; +async function getTrustedRoot( +/* istanbul ignore next */ +options = {}) { + const client = createClient(options); + const trustedRoot = await client.getTarget(TRUSTED_ROOT_TARGET); + return protobuf_specs_1.TrustedRoot.fromJSON(JSON.parse(trustedRoot)); +} +async function initTUF( +/* istanbul ignore next */ +options = {}) { + const client = createClient(options); + return client.refresh().then(() => client); +} +// Create a TUF client with default options +function createClient(options) { + /* istanbul ignore next */ + return new client_1.TUFClient({ + cachePath: options.cachePath || (0, appdata_1.appDataPath)(DEFAULT_CACHE_DIR), + rootPath: options.rootPath, + mirrorURL: options.mirrorURL || exports.DEFAULT_MIRROR_URL, + retry: options.retry ?? DEFAULT_RETRY, + timeout: options.timeout ?? DEFAULT_TIMEOUT, + forceCache: options.forceCache ?? false, + forceInit: options.forceInit ?? options.force ?? false, + }); +} +var error_1 = require("./error"); +Object.defineProperty(exports, "TUFError", { enumerable: true, get: function () { return error_1.TUFError; } }); diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/target.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/target.js new file mode 100644 index 0000000000000..5c6675bdfbf5f --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/target.js @@ -0,0 +1,79 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.readTarget = readTarget; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const fs_1 = __importDefault(require("fs")); +const error_1 = require("./error"); +// Downloads and returns the specified target from the provided TUF Updater. +async function readTarget(tuf, targetPath) { + const path = await getTargetPath(tuf, targetPath); + return new Promise((resolve, reject) => { + fs_1.default.readFile(path, 'utf-8', (err, data) => { + if (err) { + reject(new error_1.TUFError({ + code: 'TUF_READ_TARGET_ERROR', + message: `error reading target ${path}`, + cause: err, + })); + } + else { + resolve(data); + } + }); + }); +} +// Returns the local path to the specified target. If the target is not yet +// cached locally, the provided TUF Updater will be used to download and +// cache the target. 
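+// Failures are mapped onto TUFError codes: TUF_REFRESH_METADATA_ERROR when
+// refreshing the metadata fails, TUF_FIND_TARGET_ERROR when the target is not
+// present in the metadata, and TUF_DOWNLOAD_TARGET_ERROR when the download
+// itself fails.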
+async function getTargetPath(tuf, target) { + let targetInfo; + try { + targetInfo = await tuf.getTargetInfo(target); + } + catch (err) { + throw new error_1.TUFError({ + code: 'TUF_REFRESH_METADATA_ERROR', + message: 'error refreshing TUF metadata', + cause: err, + }); + } + if (!targetInfo) { + throw new error_1.TUFError({ + code: 'TUF_FIND_TARGET_ERROR', + message: `target ${target} not found`, + }); + } + let path = await tuf.findCachedTarget(targetInfo); + // An empty path here means the target has not been cached locally, or is + // out of date. In either case, we need to download it. + if (!path) { + try { + path = await tuf.downloadTarget(targetInfo); + } + catch (err) { + throw new error_1.TUFError({ + code: 'TUF_DOWNLOAD_TARGET_ERROR', + message: `error downloading target ${path}`, + cause: err, + }); + } + } + return path; +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/package.json new file mode 100644 index 0000000000000..808689dfddf92 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/package.json @@ -0,0 +1,41 @@ +{ + "name": "@sigstore/tuf", + "version": "3.0.0", + "description": "Client for the Sigstore TUF repository", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "clean": "shx rm -rf dist *.tsbuildinfo", + "build": "tsc --build", + "test": "jest" + }, + "files": [ + "dist", + "seeds.json" + ], + "author": "bdehamer@github.com", + "license": "Apache-2.0", + "repository": { + "type": "git", + "url": "git+https://github.com/sigstore/sigstore-js.git" + }, + "bugs": { + "url": "https://github.com/sigstore/sigstore-js/issues" + }, + "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/tuf#readme", + "publishConfig": { + "provenance": true + }, + "devDependencies": { + "@sigstore/jest": "^0.0.0", + "@tufjs/repo-mock": "^3.0.1", + "@types/make-fetch-happen": "^10.0.4" + }, + "dependencies": { + "@sigstore/protobuf-specs": "^0.3.2", + "tuf-js": "^3.0.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/seeds.json b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/seeds.json new file mode 100644 index 0000000000000..d1d3c6b5c4604 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/seeds.json @@ -0,0 +1 @@ +{"https://tuf-repo-cdn.sigstore.dev":{"root.json":"{
 "signatures": [
  {
   "keyid": "6f260089d5923daf20166ca657c543af618346ab971884a99962b01988bbe0c3",
   "sig": "30460221008ab1f6f17d4f9e6d7dcf1c88912b6b53cc10388644ae1f09bc37a082cd06003e022100e145ef4c7b782d4e8107b53437e669d0476892ce999903ae33d14448366996e7"
  },
  {
   "keyid": "e71a54d543835ba86adad9460379c7641fb8726d164ea766801a1c522aba7ea2",
   "sig": "3045022100c768b2f86da99569019c160a081da54ae36c34c0a3120d3cb69b53b7d113758e02204f671518f617b20d46537fae6c3b63bae8913f4f1962156105cc4f019ac35c6a"
  },
  {
   "keyid": "22f4caec6d8e6f9555af66b3d4c3cb06a3bb23fdc7e39c916c61f462e6f52b06",
   "sig": "3045022100b4434e6995d368d23e74759acd0cb9013c83a5d3511f0f997ec54c456ae4350a022015b0e265d182d2b61dc74e155d98b3c3fbe564ba05286aa14c8df02c9b756516"
  },
  {
   "keyid": "61643838125b440b40db6942f5cb5a31c0dc04368316eb2aaa58b95904a58222",
   "sig": "304502210082c58411d989eb9f861410857d42381590ec9424dbdaa51e78ed13515431904e0220118185da6a6c2947131c17797e2bb7620ce26e5f301d1ceac5f2a7e58f9dcf2e"
  },
  {
   "keyid": "a687e5bf4fab82b0ee58d46e05c9535145a2c9afb458f43d42b45ca0fdce2a70",
   "sig": "3046022100c78513854cae9c32eaa6b88e18912f48006c2757a258f917312caba75948eb9e022100d9e1b4ce0adfe9fd2e2148d7fa27a2f40ba1122bd69da7612d8d1776b013c91d"
  },
  {
   "keyid": "fdfa83a07b5a83589b87ded41f77f39d232ad91f7cce52868dacd06ba089849f",
   "sig": "3045022056483a2d5d9ea9cec6e11eadfb33c484b614298faca15acf1c431b11ed7f734c022100d0c1d726af92a87e4e66459ca5adf38a05b44e1f94318423f954bae8bca5bb2e"
  },
  {
   "keyid": "e2f59acb9488519407e18cbfc9329510be03c04aca9929d2f0301343fec85523",
   "sig": "3046022100d004de88024c32dc5653a9f4843cfc5215427048ad9600d2cf9c969e6edff3d2022100d9ebb798f5fc66af10899dece014a8628ccf3c5402cd4a4270207472f8f6e712"
  },
  {
   "keyid": "3c344aa068fd4cc4e87dc50b612c02431fbc771e95003993683a2b0bf260cf0e",
   "sig": "3046022100b7b09996c45ca2d4b05603e56baefa29718a0b71147cf8c6e66349baa61477df022100c4da80c717b4fa7bba0fd5c72da8a0499358b01358b2309f41d1456ea1e7e1d9"
  },
  {
   "keyid": "ec81669734e017996c5b85f3d02c3de1dd4637a152019fe1af125d2f9368b95e",
   "sig": "3046022100be9782c30744e411a82fa85b5138d601ce148bc19258aec64e7ec24478f38812022100caef63dcaf1a4b9a500d3bd0e3f164ec18f1b63d7a9460d9acab1066db0f016d"
  },
  {
   "keyid": "1e1d65ce98b10addad4764febf7dda2d0436b3d3a3893579c0dddaea20e54849",
   "sig": "30450220746ec3f8534ce55531d0d01ff64964ef440d1e7d2c4c142409b8e9769f1ada6f022100e3b929fcd93ea18feaa0825887a7210489879a66780c07a83f4bd46e2f09ab3b"
  }
 ],
 "signed": {
  "_type": "root",
  "consistent_snapshot": true,
  "expires": "2025-02-19T08:04:32Z",
  "keys": {
   "22f4caec6d8e6f9555af66b3d4c3cb06a3bb23fdc7e39c916c61f462e6f52b06": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEzBzVOmHCPojMVLSI364WiiV8NPrD\n6IgRxVliskz/v+y3JER5mcVGcONliDcWMC5J2lfHmjPNPhb4H7xm8LzfSA==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@santiagotorres"
   },
   "61643838125b440b40db6942f5cb5a31c0dc04368316eb2aaa58b95904a58222": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEinikSsAQmYkNeH5eYq/CnIzLaacO\nxlSaawQDOwqKy/tCqxq5xxPSJc21K4WIhs9GyOkKfzueY3GILzcMJZ4cWw==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@bobcallaway"
   },
   "6f260089d5923daf20166ca657c543af618346ab971884a99962b01988bbe0c3": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEy8XKsmhBYDI8Jc0GwzBxeKax0cm5\nSTKEU65HPFunUn41sT8pi0FjM4IkHz/YUmwmLUO0Wt7lxhj6BkLIK4qYAw==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@dlorenc"
   },
   "7247f0dbad85b147e1863bade761243cc785dcb7aa410e7105dd3d2b61a36d2c": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEWRiGr5+j+3J5SsH+Ztr5nE2H2wO7\nBV+nO3s93gLca18qTOzHY1oWyAGDykMSsGTUBSt9D+An0KfKsD2mfSM42Q==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-online-uri": "gcpkms://projects/sigstore-root-signing/locations/global/keyRings/root/cryptoKeys/timestamp"
   },
   "a687e5bf4fab82b0ee58d46e05c9535145a2c9afb458f43d42b45ca0fdce2a70": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE0ghrh92Lw1Yr3idGV5WqCtMDB8Cx\n+D8hdC4w2ZLNIplVRoVGLskYa3gheMyOjiJ8kPi15aQ2//7P+oj7UvJPGw==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@joshuagl"
   },
   "e71a54d543835ba86adad9460379c7641fb8726d164ea766801a1c522aba7ea2": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEEXsz3SZXFb8jMV42j6pJlyjbjR8K\nN3Bwocexq6LMIb5qsWKOQvLN16NUefLc4HswOoumRsVVaajSpQS6fobkRw==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@mnm678"
   }
  },
  "roles": {
   "root": {
    "keyids": [
     "6f260089d5923daf20166ca657c543af618346ab971884a99962b01988bbe0c3",
     "e71a54d543835ba86adad9460379c7641fb8726d164ea766801a1c522aba7ea2",
     "22f4caec6d8e6f9555af66b3d4c3cb06a3bb23fdc7e39c916c61f462e6f52b06",
     "61643838125b440b40db6942f5cb5a31c0dc04368316eb2aaa58b95904a58222",
     "a687e5bf4fab82b0ee58d46e05c9535145a2c9afb458f43d42b45ca0fdce2a70"
    ],
    "threshold": 3
   },
   "snapshot": {
    "keyids": [
     "7247f0dbad85b147e1863bade761243cc785dcb7aa410e7105dd3d2b61a36d2c"
    ],
    "threshold": 1,
    "x-tuf-on-ci-expiry-period": 3650,
    "x-tuf-on-ci-signing-period": 365
   },
   "targets": {
    "keyids": [
     "6f260089d5923daf20166ca657c543af618346ab971884a99962b01988bbe0c3",
     "e71a54d543835ba86adad9460379c7641fb8726d164ea766801a1c522aba7ea2",
     "22f4caec6d8e6f9555af66b3d4c3cb06a3bb23fdc7e39c916c61f462e6f52b06",
     "61643838125b440b40db6942f5cb5a31c0dc04368316eb2aaa58b95904a58222",
     "a687e5bf4fab82b0ee58d46e05c9535145a2c9afb458f43d42b45ca0fdce2a70"
    ],
    "threshold": 3
   },
   "timestamp": {
    "keyids": [
     "7247f0dbad85b147e1863bade761243cc785dcb7aa410e7105dd3d2b61a36d2c"
    ],
    "threshold": 1,
    "x-tuf-on-ci-expiry-period": 7,
    "x-tuf-on-ci-signing-period": 4
   }
  },
  "spec_version": "1.0",
  "version": 10,
  "x-tuf-on-ci-expiry-period": 182,
  "x-tuf-on-ci-signing-period": 31
 }
}","targets":{"trusted_root.json":"{
  "mediaType": "application/vnd.dev.sigstore.trustedroot+json;version=0.1",
  "tlogs": [
    {
      "baseUrl": "https://rekor.sigstore.dev",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE2G2Y+2tabdTV5BcGiBIx0a9fAFwrkBbmLSGtks4L3qX6yYY0zufBnhC8Ur/iy55GhWP/9A/bY2LhC30M9+RYtw==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2021-01-12T11:53:27.000Z"
        }
      },
      "logId": {
        "keyId": "wNI9atQGlz+VWfO6LRygH4QUfY/8W4RFwiT5i5WRgB0="
      }
    }
  ],
  "certificateAuthorities": [
    {
      "subject": {
        "organization": "sigstore.dev",
        "commonName": "sigstore"
      },
      "uri": "https://fulcio.sigstore.dev",
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIIB+DCCAX6gAwIBAgITNVkDZoCiofPDsy7dfm6geLbuhzAKBggqhkjOPQQDAzAqMRUwEwYDVQQKEwxzaWdzdG9yZS5kZXYxETAPBgNVBAMTCHNpZ3N0b3JlMB4XDTIxMDMwNzAzMjAyOVoXDTMxMDIyMzAzMjAyOVowKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTB2MBAGByqGSM49AgEGBSuBBAAiA2IABLSyA7Ii5k+pNO8ZEWY0ylemWDowOkNa3kL+GZE5Z5GWehL9/A9bRNA3RbrsZ5i0JcastaRL7Sp5fp/jD5dxqc/UdTVnlvS16an+2Yfswe/QuLolRUCrcOE2+2iA5+tzd6NmMGQwDgYDVR0PAQH/BAQDAgEGMBIGA1UdEwEB/wQIMAYBAf8CAQEwHQYDVR0OBBYEFMjFHQBBmiQpMlEk6w2uSu1KBtPsMB8GA1UdIwQYMBaAFMjFHQBBmiQpMlEk6w2uSu1KBtPsMAoGCCqGSM49BAMDA2gAMGUCMH8liWJfMui6vXXBhjDgY4MwslmN/TJxVe/83WrFomwmNf056y1X48F9c4m3a3ozXAIxAKjRay5/aj/jsKKGIkmQatjI8uupHr/+CxFvaJWmpYqNkLDGRU+9orzh5hI2RrcuaQ=="
          }
        ]
      },
      "validFor": {
        "start": "2021-03-07T03:20:29.000Z",
        "end": "2022-12-31T23:59:59.999Z"
      }
    },
    {
      "subject": {
        "organization": "sigstore.dev",
        "commonName": "sigstore"
      },
      "uri": "https://fulcio.sigstore.dev",
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIICGjCCAaGgAwIBAgIUALnViVfnU0brJasmRkHrn/UnfaQwCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMjA0MTMyMDA2MTVaFw0zMTEwMDUxMzU2NThaMDcxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEeMBwGA1UEAxMVc2lnc3RvcmUtaW50ZXJtZWRpYXRlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE8RVS/ysH+NOvuDZyPIZtilgUF9NlarYpAd9HP1vBBH1U5CV77LSS7s0ZiH4nE7Hv7ptS6LvvR/STk798LVgMzLlJ4HeIfF3tHSaexLcYpSASr1kS0N/RgBJz/9jWCiXno3sweTAOBgNVHQ8BAf8EBAMCAQYwEwYDVR0lBAwwCgYIKwYBBQUHAwMwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQU39Ppz1YkEZb5qNjpKFWixi4YZD8wHwYDVR0jBBgwFoAUWMAeX5FFpWapesyQoZMi0CrFxfowCgYIKoZIzj0EAwMDZwAwZAIwPCsQK4DYiZYDPIaDi5HFKnfxXx6ASSVmERfsynYBiX2X6SJRnZU84/9DZdnFvvxmAjBOt6QpBlc4J/0DxvkTCqpclvziL6BCCPnjdlIB3Pu3BxsPmygUY7Ii2zbdCdliiow="
          },
          {
            "rawBytes": "MIIB9zCCAXygAwIBAgIUALZNAPFdxHPwjeDloDwyYChAO/4wCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMTEwMDcxMzU2NTlaFw0zMTEwMDUxMzU2NThaMCoxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjERMA8GA1UEAxMIc2lnc3RvcmUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAT7XeFT4rb3PQGwS4IajtLk3/OlnpgangaBclYpsYBr5i+4ynB07ceb3LP0OIOZdxexX69c5iVuyJRQ+Hz05yi+UF3uBWAlHpiS5sh0+H2GHE7SXrk1EC5m1Tr19L9gg92jYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRYwB5fkUWlZql6zJChkyLQKsXF+jAfBgNVHSMEGDAWgBRYwB5fkUWlZql6zJChkyLQKsXF+jAKBggqhkjOPQQDAwNpADBmAjEAj1nHeXZp+13NWBNa+EDsDP8G1WWg1tCMWP/WHPqpaVo0jhsweNFZgSs0eE7wYI4qAjEA2WB9ot98sIkoF3vZYdd3/VtWB5b9TNMea7Ix/stJ5TfcLLeABLE4BNJOsQ4vnBHJ"
          }
        ]
      },
      "validFor": {
        "start": "2022-04-13T20:06:15.000Z"
      }
    }
  ],
  "ctlogs": [
    {
      "baseUrl": "https://ctfe.sigstore.dev/test",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEbfwR+RJudXscgRBRpKX1XFDy3PyudDxz/SfnRi1fT8ekpfBd2O1uoz7jr3Z8nKzxA69EUQ+eFCFI3zeubPWU7w==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2021-03-14T00:00:00.000Z",
          "end": "2022-10-31T23:59:59.999Z"
        }
      },
      "logId": {
        "keyId": "CGCS8ChS/2hF0dFrJ4ScRWcYrBY9wzjSbea8IgY2b3I="
      }
    },
    {
      "baseUrl": "https://ctfe.sigstore.dev/2022",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEiPSlFi0CmFTfEjCUqF9HuCEcYXNKAaYalIJmBZ8yyezPjTqhxrKBpMnaocVtLJBI1eM3uXnQzQGAJdJ4gs9Fyw==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2022-10-20T00:00:00.000Z"
        }
      },
      "logId": {
        "keyId": "3T0wasbHETJjGR4cmWc3AqJKXrjePK3/h4pygC8p7o4="
      }
    }
  ],
  "timestampAuthorities": [
    {
      "subject": {
        "organization": "GitHub, Inc.",
        "commonName": "Internal Services Root"
      },
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIIB3DCCAWKgAwIBAgIUchkNsH36Xa04b1LqIc+qr9DVecMwCgYIKoZIzj0EAwMwMjEVMBMGA1UEChMMR2l0SHViLCBJbmMuMRkwFwYDVQQDExBUU0EgaW50ZXJtZWRpYXRlMB4XDTIzMDQxNDAwMDAwMFoXDTI0MDQxMzAwMDAwMFowMjEVMBMGA1UEChMMR2l0SHViLCBJbmMuMRkwFwYDVQQDExBUU0EgVGltZXN0YW1waW5nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEUD5ZNbSqYMd6r8qpOOEX9ibGnZT9GsuXOhr/f8U9FJugBGExKYp40OULS0erjZW7xV9xV52NnJf5OeDq4e5ZKqNWMFQwDgYDVR0PAQH/BAQDAgeAMBMGA1UdJQQMMAoGCCsGAQUFBwMIMAwGA1UdEwEB/wQCMAAwHwYDVR0jBBgwFoAUaW1RudOgVt0leqY0WKYbuPr47wAwCgYIKoZIzj0EAwMDaAAwZQIwbUH9HvD4ejCZJOWQnqAlkqURllvu9M8+VqLbiRK+zSfZCZwsiljRn8MQQRSkXEE5AjEAg+VxqtojfVfu8DhzzhCx9GKETbJHb19iV72mMKUbDAFmzZ6bQ8b54Zb8tidy5aWe"
          },
          {
            "rawBytes": "MIICEDCCAZWgAwIBAgIUX8ZO5QXP7vN4dMQ5e9sU3nub8OgwCgYIKoZIzj0EAwMwODEVMBMGA1UEChMMR2l0SHViLCBJbmMuMR8wHQYDVQQDExZJbnRlcm5hbCBTZXJ2aWNlcyBSb290MB4XDTIzMDQxNDAwMDAwMFoXDTI4MDQxMjAwMDAwMFowMjEVMBMGA1UEChMMR2l0SHViLCBJbmMuMRkwFwYDVQQDExBUU0EgaW50ZXJtZWRpYXRlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEvMLY/dTVbvIJYANAuszEwJnQE1llftynyMKIMhh48HmqbVr5ygybzsLRLVKbBWOdZ21aeJz+gZiytZetqcyF9WlER5NEMf6JV7ZNojQpxHq4RHGoGSceQv/qvTiZxEDKo2YwZDAOBgNVHQ8BAf8EBAMCAQYwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQUaW1RudOgVt0leqY0WKYbuPr47wAwHwYDVR0jBBgwFoAU9NYYlobnAG4c0/qjxyH/lq/wz+QwCgYIKoZIzj0EAwMDaQAwZgIxAK1B185ygCrIYFlIs3GjswjnwSMG6LY8woLVdakKDZxVa8f8cqMs1DhcxJ0+09w95QIxAO+tBzZk7vjUJ9iJgD4R6ZWTxQWKqNm74jO99o+o9sv4FI/SZTZTFyMn0IJEHdNmyA=="
          },
          {
            "rawBytes": "MIIB9DCCAXqgAwIBAgIUa/JAkdUjK4JUwsqtaiRJGWhqLSowCgYIKoZIzj0EAwMwODEVMBMGA1UEChMMR2l0SHViLCBJbmMuMR8wHQYDVQQDExZJbnRlcm5hbCBTZXJ2aWNlcyBSb290MB4XDTIzMDQxNDAwMDAwMFoXDTMzMDQxMTAwMDAwMFowODEVMBMGA1UEChMMR2l0SHViLCBJbmMuMR8wHQYDVQQDExZJbnRlcm5hbCBTZXJ2aWNlcyBSb290MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEf9jFAXxz4kx68AHRMOkFBhflDcMTvzaXz4x/FCcXjJ/1qEKon/qPIGnaURskDtyNbNDOpeJTDDFqt48iMPrnzpx6IZwqemfUJN4xBEZfza+pYt/iyod+9tZr20RRWSv/o0UwQzAOBgNVHQ8BAf8EBAMCAQYwEgYDVR0TAQH/BAgwBgEB/wIBAjAdBgNVHQ4EFgQU9NYYlobnAG4c0/qjxyH/lq/wz+QwCgYIKoZIzj0EAwMDaAAwZQIxALZLZ8BgRXzKxLMMN9VIlO+e4hrBnNBgF7tz7Hnrowv2NetZErIACKFymBlvWDvtMAIwZO+ki6ssQ1bsZo98O8mEAf2NZ7iiCgDDU0Vwjeco6zyeh0zBTs9/7gV6AHNQ53xD"
          }
        ]
      },
      "validFor": {
        "start": "2023-04-14T00:00:00.000Z"
      }
    }
  ]
}
","registry.npmjs.org%2Fkeys.json":"ewogICAgImtleXMiOiBbCiAgICAgICAgewogICAgICAgICAgICAia2V5SWQiOiAiU0hBMjU2OmpsM2J3c3d1ODBQampva0NnaDBvMnc1YzJVNExoUUFFNTdnajljejFrekEiLAogICAgICAgICAgICAia2V5VXNhZ2UiOiAibnBtOnNpZ25hdHVyZXMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRTFPbGIzek1BRkZ4WEtIaUlrUU81Y0ozWWhsNWk2VVBwK0lodXRlQkpidUhjQTVVb2dLbzBFV3RsV3dXNktTYUtvVE5FWUw3SmxDUWlWbmtoQmt0VWdnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIxOTk5LTAxLTAxVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgIH0KICAgICAgICB9LAogICAgICAgIHsKICAgICAgICAgICAgImtleUlkIjogIlNIQTI1NjpqbDNid3N3dTgwUGpqb2tDZ2gwbzJ3NWMyVTRMaFFBRTU3Z2o5Y3oxa3pBIiwKICAgICAgICAgICAgImtleVVzYWdlIjogIm5wbTphdHRlc3RhdGlvbnMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRTFPbGIzek1BRkZ4WEtIaUlrUU81Y0ozWWhsNWk2VVBwK0lodXRlQkpidUhjQTVVb2dLbzBFV3RsV3dXNktTYUtvVE5FWUw3SmxDUWlWbmtoQmt0VWdnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIyMDIyLTEyLTAxVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgIH0KICAgICAgICB9CiAgICBdCn0K"}}} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/bundle/dsse.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/bundle/dsse.js new file mode 100644 index 0000000000000..1033fc422aba0 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/bundle/dsse.js @@ -0,0 +1,43 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DSSESignatureContent = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const core_1 = require("@sigstore/core"); +class DSSESignatureContent { + constructor(env) { + this.env = env; + } + compareDigest(digest) { + return core_1.crypto.bufferEqual(digest, core_1.crypto.digest('sha256', this.env.payload)); + } + compareSignature(signature) { + return core_1.crypto.bufferEqual(signature, this.signature); + } + verifySignature(key) { + return core_1.crypto.verify(this.preAuthEncoding, key, this.signature); + } + get signature() { + return this.env.signatures.length > 0 + ? 
this.env.signatures[0].sig + : Buffer.from(''); + } + // DSSE Pre-Authentication Encoding + get preAuthEncoding() { + return core_1.dsse.preAuthEncoding(this.env.payloadType, this.env.payload); + } +} +exports.DSSESignatureContent = DSSESignatureContent; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/bundle/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/bundle/index.js new file mode 100644 index 0000000000000..4287d8032b75f --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/bundle/index.js @@ -0,0 +1,57 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toSignedEntity = toSignedEntity; +exports.signatureContent = signatureContent; +const core_1 = require("@sigstore/core"); +const dsse_1 = require("./dsse"); +const message_1 = require("./message"); +function toSignedEntity(bundle, artifact) { + const { tlogEntries, timestampVerificationData } = bundle.verificationMaterial; + const timestamps = []; + for (const entry of tlogEntries) { + timestamps.push({ + $case: 'transparency-log', + tlogEntry: entry, + }); + } + for (const ts of timestampVerificationData?.rfc3161Timestamps ?? []) { + timestamps.push({ + $case: 'timestamp-authority', + timestamp: core_1.RFC3161Timestamp.parse(ts.signedTimestamp), + }); + } + return { + signature: signatureContent(bundle, artifact), + key: key(bundle), + tlogEntries, + timestamps, + }; +} +function signatureContent(bundle, artifact) { + switch (bundle.content.$case) { + case 'dsseEnvelope': + return new dsse_1.DSSESignatureContent(bundle.content.dsseEnvelope); + case 'messageSignature': + return new message_1.MessageSignatureContent(bundle.content.messageSignature, artifact); + } +} +function key(bundle) { + switch (bundle.verificationMaterial.content.$case) { + case 'publicKey': + return { + $case: 'public-key', + hint: bundle.verificationMaterial.content.publicKey.hint, + }; + case 'x509CertificateChain': + return { + $case: 'certificate', + certificate: core_1.X509Certificate.parse(bundle.verificationMaterial.content.x509CertificateChain + .certificates[0].rawBytes), + }; + case 'certificate': + return { + $case: 'certificate', + certificate: core_1.X509Certificate.parse(bundle.verificationMaterial.content.certificate.rawBytes), + }; + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/bundle/message.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/bundle/message.js new file mode 100644 index 0000000000000..836148c68a8b6 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/bundle/message.js @@ -0,0 +1,36 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.MessageSignatureContent = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const core_1 = require("@sigstore/core"); +class MessageSignatureContent { + constructor(messageSignature, artifact) { + this.signature = messageSignature.signature; + this.messageDigest = messageSignature.messageDigest.digest; + this.artifact = artifact; + } + compareSignature(signature) { + return core_1.crypto.bufferEqual(signature, this.signature); + } + compareDigest(digest) { + return core_1.crypto.bufferEqual(digest, this.messageDigest); + } + verifySignature(key) { + return core_1.crypto.verify(this.artifact, key, this.signature); + } +} +exports.MessageSignatureContent = MessageSignatureContent; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/error.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/error.js new file mode 100644 index 0000000000000..6cb1cd4121343 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/error.js @@ -0,0 +1,32 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.PolicyError = exports.VerificationError = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +class BaseError extends Error { + constructor({ code, message, cause, }) { + super(message); + this.code = code; + this.cause = cause; + this.name = this.constructor.name; + } +} +class VerificationError extends BaseError { +} +exports.VerificationError = VerificationError; +class PolicyError extends BaseError { +} +exports.PolicyError = PolicyError; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/index.js new file mode 100644 index 0000000000000..3222876fcd68b --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/index.js @@ -0,0 +1,28 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Verifier = exports.toTrustMaterial = exports.VerificationError = exports.PolicyError = exports.toSignedEntity = void 0; +/* istanbul ignore file */ +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +var bundle_1 = require("./bundle"); +Object.defineProperty(exports, "toSignedEntity", { enumerable: true, get: function () { return bundle_1.toSignedEntity; } }); +var error_1 = require("./error"); +Object.defineProperty(exports, "PolicyError", { enumerable: true, get: function () { return error_1.PolicyError; } }); +Object.defineProperty(exports, "VerificationError", { enumerable: true, get: function () { return error_1.VerificationError; } }); +var trust_1 = require("./trust"); +Object.defineProperty(exports, "toTrustMaterial", { enumerable: true, get: function () { return trust_1.toTrustMaterial; } }); +var verifier_1 = require("./verifier"); +Object.defineProperty(exports, "Verifier", { enumerable: true, get: function () { return verifier_1.Verifier; } }); diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/key/certificate.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/key/certificate.js new file mode 100644 index 0000000000000..a916de0e51e71 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/key/certificate.js @@ -0,0 +1,205 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.CertificateChainVerifier = void 0; +exports.verifyCertificateChain = verifyCertificateChain; +const error_1 = require("../error"); +const trust_1 = require("../trust"); +function verifyCertificateChain(leaf, certificateAuthorities) { + // Filter list of trusted CAs to those which are valid for the given + // leaf certificate. + const cas = (0, trust_1.filterCertAuthorities)(certificateAuthorities, { + start: leaf.notBefore, + end: leaf.notAfter, + }); + /* eslint-disable-next-line @typescript-eslint/no-explicit-any */ + let error; + for (const ca of cas) { + try { + const verifier = new CertificateChainVerifier({ + trustedCerts: ca.certChain, + untrustedCert: leaf, + }); + return verifier.verify(); + } + catch (err) { + error = err; + } + } + // If we failed to verify the certificate chain for all of the trusted + // CAs, throw the last error we encountered. + throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: 'Failed to verify certificate chain', + cause: error, + }); +} +class CertificateChainVerifier { + constructor(opts) { + this.untrustedCert = opts.untrustedCert; + this.trustedCerts = opts.trustedCerts; + this.localCerts = dedupeCertificates([ + ...opts.trustedCerts, + opts.untrustedCert, + ]); + } + verify() { + // Construct certificate path from leaf to root + const certificatePath = this.sort(); + // Perform validation checks on each certificate in the path + this.checkPath(certificatePath); + // Return verified certificate path + return certificatePath; + } + sort() { + const leafCert = this.untrustedCert; + // Construct all possible paths from the leaf + let paths = this.buildPaths(leafCert); + // Filter for paths which contain a trusted certificate + paths = paths.filter((path) => path.some((cert) => this.trustedCerts.includes(cert))); + if (paths.length === 0) { + throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: 'no trusted certificate path found', + }); + } + // Find the shortest of possible paths + /* istanbul ignore next */ + const path = paths.reduce((prev, curr) => prev.length < curr.length ? prev : curr); + // Construct chain from shortest path + // Removes the last certificate in the path, which will be a second copy + // of the root certificate given that the root is self-signed. 
+ return [leafCert, ...path].slice(0, -1); + } + // Recursively build all possible paths from the leaf to the root + buildPaths(certificate) { + const paths = []; + const issuers = this.findIssuer(certificate); + if (issuers.length === 0) { + throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: 'no valid certificate path found', + }); + } + for (let i = 0; i < issuers.length; i++) { + const issuer = issuers[i]; + // Base case - issuer is self + if (issuer.equals(certificate)) { + paths.push([certificate]); + continue; + } + // Recursively build path for the issuer + const subPaths = this.buildPaths(issuer); + // Construct paths by appending the issuer to each subpath + for (let j = 0; j < subPaths.length; j++) { + paths.push([issuer, ...subPaths[j]]); + } + } + return paths; + } + // Return all possible issuers for the given certificate + findIssuer(certificate) { + let issuers = []; + let keyIdentifier; + // Exit early if the certificate is self-signed + if (certificate.subject.equals(certificate.issuer)) { + if (certificate.verify()) { + return [certificate]; + } + } + // If the certificate has an authority key identifier, use that + // to find the issuer + if (certificate.extAuthorityKeyID) { + keyIdentifier = certificate.extAuthorityKeyID.keyIdentifier; + // TODO: Add support for authorityCertIssuer/authorityCertSerialNumber + // though Fulcio doesn't appear to use these + } + // Find possible issuers by comparing the authorityKeyID/subjectKeyID + // or issuer/subject. Potential issuers are added to the result array. + this.localCerts.forEach((possibleIssuer) => { + if (keyIdentifier) { + if (possibleIssuer.extSubjectKeyID) { + if (possibleIssuer.extSubjectKeyID.keyIdentifier.equals(keyIdentifier)) { + issuers.push(possibleIssuer); + } + return; + } + } + // Fallback to comparing certificate issuer and subject if + // subjectKey/authorityKey extensions are not present + if (possibleIssuer.subject.equals(certificate.issuer)) { + issuers.push(possibleIssuer); + } + }); + // Remove any issuers which fail to verify the certificate + issuers = issuers.filter((issuer) => { + try { + return certificate.verify(issuer); + } + catch (ex) { + /* istanbul ignore next - should never error */ + return false; + } + }); + return issuers; + } + checkPath(path) { + /* istanbul ignore if */ + if (path.length < 1) { + throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: 'certificate chain must contain at least one certificate', + }); + } + // Ensure that all certificates beyond the leaf are CAs + const validCAs = path.slice(1).every((cert) => cert.isCA); + if (!validCAs) { + throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: 'intermediate certificate is not a CA', + }); + } + // Certificate's issuer must match the subject of the next certificate + // in the chain + for (let i = path.length - 2; i >= 0; i--) { + /* istanbul ignore if */ + if (!path[i].issuer.equals(path[i + 1].subject)) { + throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: 'incorrect certificate name chaining', + }); + } + } + // Check pathlength constraints + for (let i = 0; i < path.length; i++) { + const cert = path[i]; + // If the certificate is a CA, check the path length + if (cert.extBasicConstraints?.isCA) { + const pathLength = cert.extBasicConstraints.pathLenConstraint; + // The path length, if set, indicates how many intermediate + // certificates (NOT including the leaf) are allowed to follow. 
The + // pathLength constraint of any intermediate CA certificate MUST be + // greater than or equal to it's own depth in the chain (with an + // adjustment for the leaf certificate) + if (pathLength !== undefined && pathLength < i - 1) { + throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: 'path length constraint exceeded', + }); + } + } + } + } +} +exports.CertificateChainVerifier = CertificateChainVerifier; +// Remove duplicate certificates from the array +function dedupeCertificates(certs) { + for (let i = 0; i < certs.length; i++) { + for (let j = i + 1; j < certs.length; j++) { + if (certs[i].equals(certs[j])) { + certs.splice(j, 1); + j--; + } + } + } + return certs; +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/key/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/key/index.js new file mode 100644 index 0000000000000..cc894aab95a5d --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/key/index.js @@ -0,0 +1,72 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifyPublicKey = verifyPublicKey; +exports.verifyCertificate = verifyCertificate; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const core_1 = require("@sigstore/core"); +const error_1 = require("../error"); +const certificate_1 = require("./certificate"); +const sct_1 = require("./sct"); +const OID_FULCIO_ISSUER_V1 = '1.3.6.1.4.1.57264.1.1'; +const OID_FULCIO_ISSUER_V2 = '1.3.6.1.4.1.57264.1.8'; +function verifyPublicKey(hint, timestamps, trustMaterial) { + const key = trustMaterial.publicKey(hint); + timestamps.forEach((timestamp) => { + if (!key.validFor(timestamp)) { + throw new error_1.VerificationError({ + code: 'PUBLIC_KEY_ERROR', + message: `Public key is not valid for timestamp: ${timestamp.toISOString()}`, + }); + } + }); + return { key: key.publicKey }; +} +function verifyCertificate(leaf, timestamps, trustMaterial) { + // Check that leaf certificate chains to a trusted CA + const path = (0, certificate_1.verifyCertificateChain)(leaf, trustMaterial.certificateAuthorities); + // Check that ALL certificates are valid for ALL of the timestamps + const validForDate = timestamps.every((timestamp) => path.every((cert) => cert.validForDate(timestamp))); + if (!validForDate) { + throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: 'certificate is not valid or expired at the specified date', + }); + } + return { + scts: (0, sct_1.verifySCTs)(path[0], path[1], trustMaterial.ctlogs), + signer: getSigner(path[0]), + }; +} +function getSigner(cert) { + let issuer; + const issuerExtension = cert.extension(OID_FULCIO_ISSUER_V2); + /* istanbul ignore next */ + if (issuerExtension) { + issuer = issuerExtension.valueObj.subs?.[0]?.value.toString('ascii'); + } + else { + issuer = cert.extension(OID_FULCIO_ISSUER_V1)?.value.toString('ascii'); + } + const identity = { + extensions: { issuer }, + subjectAlternativeName: cert.subjectAltName, + }; + return { + key: core_1.crypto.createPublicKey(cert.publicKey), + identity, + }; +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/key/sct.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/key/sct.js new file mode 100644 index 0000000000000..8eca48738096e --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/key/sct.js @@ -0,0 +1,78 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifySCTs = verifySCTs; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const core_1 = require("@sigstore/core"); +const error_1 = require("../error"); +const trust_1 = require("../trust"); +function verifySCTs(cert, issuer, ctlogs) { + let extSCT; + // Verifying the SCT requires that we remove the SCT extension and + // re-encode the TBS structure to DER -- this value is part of the data + // over which the signature is calculated. Since this is a destructive action + // we create a copy of the certificate so we can remove the SCT extension + // without affecting the original certificate. 
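+ // The PreCert portion of the data signed by the CT log (RFC 6962, section 3.2)
+ // is assembled below as: issuer_key_hash (SHA-256 of the issuer public key)
+ // followed by a uint24 length and the DER-encoded TBSCertificate with the
+ // SCT extension removed.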
+ const clone = cert.clone(); + // Intentionally not using the findExtension method here because we want to + // remove the the SCT extension from the certificate before calculating the + // PreCert structure + for (let i = 0; i < clone.extensions.length; i++) { + const ext = clone.extensions[i]; + if (ext.subs[0].toOID() === core_1.EXTENSION_OID_SCT) { + extSCT = new core_1.X509SCTExtension(ext); + // Remove the extension from the certificate + clone.extensions.splice(i, 1); + break; + } + } + // No SCT extension found to verify + if (!extSCT) { + return []; + } + // Found an SCT extension but it has no SCTs + /* istanbul ignore if -- too difficult to fabricate test case for this */ + if (extSCT.signedCertificateTimestamps.length === 0) { + return []; + } + // Construct the PreCert structure + // https://www.rfc-editor.org/rfc/rfc6962#section-3.2 + const preCert = new core_1.ByteStream(); + // Calculate hash of the issuer's public key + const issuerId = core_1.crypto.digest('sha256', issuer.publicKey); + preCert.appendView(issuerId); + // Re-encodes the certificate to DER after removing the SCT extension + const tbs = clone.tbsCertificate.toDER(); + preCert.appendUint24(tbs.length); + preCert.appendView(tbs); + // Calculate and return the verification results for each SCT + return extSCT.signedCertificateTimestamps.map((sct) => { + // Find the ctlog instance that corresponds to the SCT's logID + const validCTLogs = (0, trust_1.filterTLogAuthorities)(ctlogs, { + logID: sct.logID, + targetDate: sct.datetime, + }); + // See if the SCT is valid for any of the CT logs + const verified = validCTLogs.some((log) => sct.verify(preCert.buffer, log.publicKey)); + if (!verified) { + throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: 'SCT verification failed', + }); + } + return sct.logID; + }); +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/policy.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/policy.js new file mode 100644 index 0000000000000..f5960cf047b84 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/policy.js @@ -0,0 +1,24 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifySubjectAlternativeName = verifySubjectAlternativeName; +exports.verifyExtensions = verifyExtensions; +const error_1 = require("./error"); +function verifySubjectAlternativeName(policyIdentity, signerIdentity) { + if (signerIdentity === undefined || !signerIdentity.match(policyIdentity)) { + throw new error_1.PolicyError({ + code: 'UNTRUSTED_SIGNER_ERROR', + message: `certificate identity error - expected ${policyIdentity}, got ${signerIdentity}`, + }); + } +} +function verifyExtensions(policyExtensions, signerExtensions = {}) { + let key; + for (key in policyExtensions) { + if (signerExtensions[key] !== policyExtensions[key]) { + throw new error_1.PolicyError({ + code: 'UNTRUSTED_SIGNER_ERROR', + message: `invalid certificate extension - expected ${key}=${policyExtensions[key]}, got ${key}=${signerExtensions[key]}`, + }); + } + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/shared.types.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/shared.types.js new file mode 100644 index 0000000000000..c8ad2e549bdc6 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/shared.types.js @@ -0,0 +1,2 @@ +"use strict"; 
+Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js new file mode 100644 index 0000000000000..46619b675f886 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js @@ -0,0 +1,157 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifyCheckpoint = verifyCheckpoint; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const core_1 = require("@sigstore/core"); +const error_1 = require("../error"); +const trust_1 = require("../trust"); +// Separator between the note and the signatures in a checkpoint +const CHECKPOINT_SEPARATOR = '\n\n'; +// Checkpoint signatures are of the following form: +// "– \n" +// where: +// - the prefix is an emdash (U+2014). +// - gives a human-readable representation of the signing ID. +// - is the first 4 bytes of the SHA256 hash of the +// associated public key followed by the signature bytes. +const SIGNATURE_REGEX = /\u2014 (\S+) (\S+)\n/g; +// Verifies the checkpoint value in the given tlog entry. There are two steps +// to the verification: +// 1. Verify that all signatures in the checkpoint can be verified against a +// trusted public key +// 2. Verify that the root hash in the checkpoint matches the root hash in the +// inclusion proof +// See: https://github.com/transparency-dev/formats/blob/main/log/README.md +function verifyCheckpoint(entry, tlogs) { + // Filter tlog instances to just those which were valid at the time of the + // entry + const validTLogs = (0, trust_1.filterTLogAuthorities)(tlogs, { + targetDate: new Date(Number(entry.integratedTime) * 1000), + }); + const inclusionProof = entry.inclusionProof; + const signedNote = SignedNote.fromString(inclusionProof.checkpoint.envelope); + const checkpoint = LogCheckpoint.fromString(signedNote.note); + // Verify that the signatures in the checkpoint are all valid + if (!verifySignedNote(signedNote, validTLogs)) { + throw new error_1.VerificationError({ + code: 'TLOG_INCLUSION_PROOF_ERROR', + message: 'invalid checkpoint signature', + }); + } + // Verify that the root hash from the checkpoint matches the root hash in the + // inclusion proof + if (!core_1.crypto.bufferEqual(checkpoint.logHash, inclusionProof.rootHash)) { + throw new error_1.VerificationError({ + code: 'TLOG_INCLUSION_PROOF_ERROR', + message: 'root hash mismatch', + }); + } +} +// Verifies the signatures in the SignedNote. For each signature, the +// corresponding transparency log is looked up by the key hint and the +// signature is verified against the public key in the transparency log. +// Throws an error if any of the signatures are invalid. 
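+// Each parsed signature carries a 4-byte key hint which is matched against the
+// first four bytes of a trusted log's ID to select the verification key.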
+function verifySignedNote(signedNote, tlogs) { + const data = Buffer.from(signedNote.note, 'utf-8'); + return signedNote.signatures.every((signature) => { + // Find the transparency log instance with the matching key hint + const tlog = tlogs.find((tlog) => core_1.crypto.bufferEqual(tlog.logID.subarray(0, 4), signature.keyHint)); + if (!tlog) { + return false; + } + return core_1.crypto.verify(data, tlog.publicKey, signature.signature); + }); +} +// SignedNote represents a signed note from a transparency log checkpoint. Consists +// of a body (or note) and one more signatures calculated over the body. See +// https://github.com/transparency-dev/formats/blob/main/log/README.md#signed-envelope +class SignedNote { + constructor(note, signatures) { + this.note = note; + this.signatures = signatures; + } + // Deserialize a SignedNote from a string + static fromString(envelope) { + if (!envelope.includes(CHECKPOINT_SEPARATOR)) { + throw new error_1.VerificationError({ + code: 'TLOG_INCLUSION_PROOF_ERROR', + message: 'missing checkpoint separator', + }); + } + // Split the note into the header and the data portions at the separator + const split = envelope.indexOf(CHECKPOINT_SEPARATOR); + const header = envelope.slice(0, split + 1); + const data = envelope.slice(split + CHECKPOINT_SEPARATOR.length); + // Find all the signature lines in the data portion + const matches = data.matchAll(SIGNATURE_REGEX); + // Parse each of the matched signature lines into the name and signature. + // The first four bytes of the signature are the key hint (should match the + // first four bytes of the log ID), and the rest is the signature itself. + const signatures = Array.from(matches, (match) => { + const [, name, signature] = match; + const sigBytes = Buffer.from(signature, 'base64'); + if (sigBytes.length < 5) { + throw new error_1.VerificationError({ + code: 'TLOG_INCLUSION_PROOF_ERROR', + message: 'malformed checkpoint signature', + }); + } + return { + name, + keyHint: sigBytes.subarray(0, 4), + signature: sigBytes.subarray(4), + }; + }); + if (signatures.length === 0) { + throw new error_1.VerificationError({ + code: 'TLOG_INCLUSION_PROOF_ERROR', + message: 'no signatures found in checkpoint', + }); + } + return new SignedNote(header, signatures); + } +} +// LogCheckpoint represents a transparency log checkpoint. 
Consists of the +// following: +// - origin: the name of the transparency log +// - logSize: the size of the log at the time of the checkpoint +// - logHash: the root hash of the log at the time of the checkpoint +// - rest: the rest of the checkpoint body, which is a list of log entries +// See: +// https://github.com/transparency-dev/formats/blob/main/log/README.md#checkpoint-body +class LogCheckpoint { + constructor(origin, logSize, logHash, rest) { + this.origin = origin; + this.logSize = logSize; + this.logHash = logHash; + this.rest = rest; + } + static fromString(note) { + const lines = note.trimEnd().split('\n'); + if (lines.length < 3) { + throw new error_1.VerificationError({ + code: 'TLOG_INCLUSION_PROOF_ERROR', + message: 'too few lines in checkpoint header', + }); + } + const origin = lines[0]; + const logSize = BigInt(lines[1]); + const rootHash = Buffer.from(lines[2], 'base64'); + const rest = lines.slice(3); + return new LogCheckpoint(origin, logSize, rootHash, rest); + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/timestamp/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/timestamp/index.js new file mode 100644 index 0000000000000..56e948de19338 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/timestamp/index.js @@ -0,0 +1,46 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifyTSATimestamp = verifyTSATimestamp; +exports.verifyTLogTimestamp = verifyTLogTimestamp; +const error_1 = require("../error"); +const checkpoint_1 = require("./checkpoint"); +const merkle_1 = require("./merkle"); +const set_1 = require("./set"); +const tsa_1 = require("./tsa"); +function verifyTSATimestamp(timestamp, data, timestampAuthorities) { + (0, tsa_1.verifyRFC3161Timestamp)(timestamp, data, timestampAuthorities); + return { + type: 'timestamp-authority', + logID: timestamp.signerSerialNumber, + timestamp: timestamp.signingTime, + }; +} +function verifyTLogTimestamp(entry, tlogAuthorities) { + let inclusionVerified = false; + if (isTLogEntryWithInclusionPromise(entry)) { + (0, set_1.verifyTLogSET)(entry, tlogAuthorities); + inclusionVerified = true; + } + if (isTLogEntryWithInclusionProof(entry)) { + (0, merkle_1.verifyMerkleInclusion)(entry); + (0, checkpoint_1.verifyCheckpoint)(entry, tlogAuthorities); + inclusionVerified = true; + } + if (!inclusionVerified) { + throw new error_1.VerificationError({ + code: 'TLOG_MISSING_INCLUSION_ERROR', + message: 'inclusion could not be verified', + }); + } + return { + type: 'transparency-log', + logID: entry.logId.keyId, + timestamp: new Date(Number(entry.integratedTime) * 1000), + }; +} +function isTLogEntryWithInclusionPromise(entry) { + return entry.inclusionPromise !== undefined; +} +function isTLogEntryWithInclusionProof(entry) { + return entry.inclusionProof !== undefined; +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/timestamp/merkle.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/timestamp/merkle.js new file mode 100644 index 0000000000000..f57cae42002bd --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/timestamp/merkle.js @@ -0,0 +1,104 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifyMerkleInclusion = verifyMerkleInclusion; +/* +Copyright 2023 The Sigstore Authors. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const core_1 = require("@sigstore/core"); +const error_1 = require("../error"); +const RFC6962_LEAF_HASH_PREFIX = Buffer.from([0x00]); +const RFC6962_NODE_HASH_PREFIX = Buffer.from([0x01]); +function verifyMerkleInclusion(entry) { + const inclusionProof = entry.inclusionProof; + const logIndex = BigInt(inclusionProof.logIndex); + const treeSize = BigInt(inclusionProof.treeSize); + if (logIndex < 0n || logIndex >= treeSize) { + throw new error_1.VerificationError({ + code: 'TLOG_INCLUSION_PROOF_ERROR', + message: `invalid index: ${logIndex}`, + }); + } + // Figure out which subset of hashes corresponds to the inner and border + // nodes + const { inner, border } = decompInclProof(logIndex, treeSize); + if (inclusionProof.hashes.length !== inner + border) { + throw new error_1.VerificationError({ + code: 'TLOG_INCLUSION_PROOF_ERROR', + message: 'invalid hash count', + }); + } + const innerHashes = inclusionProof.hashes.slice(0, inner); + const borderHashes = inclusionProof.hashes.slice(inner); + // The entry's hash is the leaf hash + const leafHash = hashLeaf(entry.canonicalizedBody); + // Chain the hashes belonging to the inner and border portions + const calculatedHash = chainBorderRight(chainInner(leafHash, innerHashes, logIndex), borderHashes); + // Calculated hash should match the root hash in the inclusion proof + if (!core_1.crypto.bufferEqual(calculatedHash, inclusionProof.rootHash)) { + throw new error_1.VerificationError({ + code: 'TLOG_INCLUSION_PROOF_ERROR', + message: 'calculated root hash does not match inclusion proof', + }); + } +} +// Breaks down inclusion proof for a leaf at the specified index in a tree of +// the specified size. The split point is where paths to the index leaf and +// the (size - 1) leaf diverge. Returns lengths of the bottom and upper proof +// parts. +function decompInclProof(index, size) { + const inner = innerProofSize(index, size); + const border = onesCount(index >> BigInt(inner)); + return { inner, border }; +} +// Computes a subtree hash for a node on or below the tree's right border. +// Assumes the provided proof hashes are ordered from lower to higher levels +// and seed is the initial hash of the node specified by the index. +function chainInner(seed, hashes, index) { + return hashes.reduce((acc, h, i) => { + if ((index >> BigInt(i)) & BigInt(1)) { + return hashChildren(h, acc); + } + else { + return hashChildren(acc, h); + } + }, seed); +} +// Computes a subtree hash for nodes along the tree's right border. +function chainBorderRight(seed, hashes) { + return hashes.reduce((acc, h) => hashChildren(h, acc), seed); +} +function innerProofSize(index, size) { + return bitLength(index ^ (size - BigInt(1))); +} +// Counts the number of ones in the binary representation of the given number. +// https://en.wikipedia.org/wiki/Hamming_weight +function onesCount(num) { + return num.toString(2).split('1').length - 1; +} +// Returns the number of bits necessary to represent an integer in binary. 
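+// e.g. bitLength(7n) === 3 (0b111) and bitLength(8n) === 4 (0b1000).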
+function bitLength(n) { + if (n === 0n) { + return 0; + } + return n.toString(2).length; +} +// Hashing logic according to RFC6962. +// https://datatracker.ietf.org/doc/html/rfc6962#section-2 +function hashChildren(left, right) { + return core_1.crypto.digest('sha256', RFC6962_NODE_HASH_PREFIX, left, right); +} +function hashLeaf(leaf) { + return core_1.crypto.digest('sha256', RFC6962_LEAF_HASH_PREFIX, leaf); +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/timestamp/set.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/timestamp/set.js new file mode 100644 index 0000000000000..5d3f47bb88746 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/timestamp/set.js @@ -0,0 +1,60 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifyTLogSET = verifyTLogSET; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const core_1 = require("@sigstore/core"); +const error_1 = require("../error"); +const trust_1 = require("../trust"); +// Verifies the SET for the given entry against the list of trusted +// transparency logs. Returns true if the SET can be verified against at least +// one of the trusted logs; otherwise, returns false. +function verifyTLogSET(entry, tlogs) { + // Filter the list of tlog instances to only those which might be able to + // verify the SET + const validTLogs = (0, trust_1.filterTLogAuthorities)(tlogs, { + logID: entry.logId.keyId, + targetDate: new Date(Number(entry.integratedTime) * 1000), + }); + // Check to see if we can verify the SET against any of the valid tlogs + const verified = validTLogs.some((tlog) => { + // Re-create the original Rekor verification payload + const payload = toVerificationPayload(entry); + // Canonicalize the payload and turn into a buffer for verification + const data = Buffer.from(core_1.json.canonicalize(payload), 'utf8'); + // Extract the SET from the tlog entry + const signature = entry.inclusionPromise.signedEntryTimestamp; + return core_1.crypto.verify(data, tlog.publicKey, signature); + }); + if (!verified) { + throw new error_1.VerificationError({ + code: 'TLOG_INCLUSION_PROMISE_ERROR', + message: 'inclusion promise could not be verified', + }); + } +} +// Returns a properly formatted "VerificationPayload" for one of the +// transaction log entires in the given bundle which can be used for SET +// verification. 
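+// An illustrative payload (values are placeholders only):
+//   { body: '<base64 entry body>', integratedTime: 1712345678, logIndex: 42,
+//     logID: '<hex-encoded log key ID>' }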
+function toVerificationPayload(entry) { + const { integratedTime, logIndex, logId, canonicalizedBody } = entry; + return { + body: canonicalizedBody.toString('base64'), + integratedTime: Number(integratedTime), + logIndex: Number(logIndex), + logID: logId.keyId.toString('hex'), + }; +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/timestamp/tsa.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/timestamp/tsa.js new file mode 100644 index 0000000000000..70388cd06c52d --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/timestamp/tsa.js @@ -0,0 +1,73 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifyRFC3161Timestamp = verifyRFC3161Timestamp; +const core_1 = require("@sigstore/core"); +const error_1 = require("../error"); +const certificate_1 = require("../key/certificate"); +const trust_1 = require("../trust"); +function verifyRFC3161Timestamp(timestamp, data, timestampAuthorities) { + const signingTime = timestamp.signingTime; + // Filter for CAs which were valid at the time of signing + timestampAuthorities = (0, trust_1.filterCertAuthorities)(timestampAuthorities, { + start: signingTime, + end: signingTime, + }); + // Filter for CAs which match serial and issuer embedded in the timestamp + timestampAuthorities = filterCAsBySerialAndIssuer(timestampAuthorities, { + serialNumber: timestamp.signerSerialNumber, + issuer: timestamp.signerIssuer, + }); + // Check that we can verify the timestamp with AT LEAST ONE of the remaining + // CAs + const verified = timestampAuthorities.some((ca) => { + try { + verifyTimestampForCA(timestamp, data, ca); + return true; + } + catch (e) { + return false; + } + }); + if (!verified) { + throw new error_1.VerificationError({ + code: 'TIMESTAMP_ERROR', + message: 'timestamp could not be verified', + }); + } +} +function verifyTimestampForCA(timestamp, data, ca) { + const [leaf, ...cas] = ca.certChain; + const signingKey = core_1.crypto.createPublicKey(leaf.publicKey); + const signingTime = timestamp.signingTime; + // Verify the certificate chain for the provided CA + try { + new certificate_1.CertificateChainVerifier({ + untrustedCert: leaf, + trustedCerts: cas, + }).verify(); + } + catch (e) { + throw new error_1.VerificationError({ + code: 'TIMESTAMP_ERROR', + message: 'invalid certificate chain', + }); + } + // Check that all of the CA certs were valid at the time of signing + const validAtSigningTime = ca.certChain.every((cert) => cert.validForDate(signingTime)); + if (!validAtSigningTime) { + throw new error_1.VerificationError({ + code: 'TIMESTAMP_ERROR', + message: 'timestamp was signed with an expired certificate', + }); + } + // Check that the signing certificate's key can be used to verify the + // timestamp signature. + timestamp.verify(data, signingKey); +} +// Filters the list of CAs to those which have a leaf signing certificate which +// matches the given serial number and issuer. 
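+// The TSA signing (leaf) certificate is expected to be the first entry in the
+// CA's certChain (see verifyTimestampForCA above).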
+function filterCAsBySerialAndIssuer(timestampAuthorities, criteria) { + return timestampAuthorities.filter((ca) => ca.certChain.length > 0 && + core_1.crypto.bufferEqual(ca.certChain[0].serialNumber, criteria.serialNumber) && + core_1.crypto.bufferEqual(ca.certChain[0].issuer, criteria.issuer)); +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/tlog/dsse.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/tlog/dsse.js new file mode 100644 index 0000000000000..d71ed8c6e7ad9 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/tlog/dsse.js @@ -0,0 +1,57 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifyDSSETLogBody = verifyDSSETLogBody; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const error_1 = require("../error"); +// Compare the given intoto tlog entry to the given bundle +function verifyDSSETLogBody(tlogEntry, content) { + switch (tlogEntry.apiVersion) { + case '0.0.1': + return verifyDSSE001TLogBody(tlogEntry, content); + default: + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: `unsupported dsse version: ${tlogEntry.apiVersion}`, + }); + } +} +// Compare the given dsse v0.0.1 tlog entry to the given DSSE envelope. +function verifyDSSE001TLogBody(tlogEntry, content) { + // Ensure the bundle's DSSE only contains a single signature + if (tlogEntry.spec.signatures?.length !== 1) { + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: 'signature count mismatch', + }); + } + const tlogSig = tlogEntry.spec.signatures[0].signature; + // Ensure that the signature in the bundle's DSSE matches tlog entry + if (!content.compareSignature(Buffer.from(tlogSig, 'base64'))) + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: 'tlog entry signature mismatch', + }); + // Ensure the digest of the bundle's DSSE payload matches the digest in the + // tlog entry + const tlogHash = tlogEntry.spec.payloadHash?.value || ''; + if (!content.compareDigest(Buffer.from(tlogHash, 'hex'))) { + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: 'DSSE payload hash mismatch', + }); + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js new file mode 100644 index 0000000000000..c4aa345b57ba7 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js @@ -0,0 +1,51 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifyHashedRekordTLogBody = verifyHashedRekordTLogBody; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const error_1 = require("../error"); +// Compare the given hashedrekord tlog entry to the given bundle +function verifyHashedRekordTLogBody(tlogEntry, content) { + switch (tlogEntry.apiVersion) { + case '0.0.1': + return verifyHashedrekord001TLogBody(tlogEntry, content); + default: + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: `unsupported hashedrekord version: ${tlogEntry.apiVersion}`, + }); + } +} +// Compare the given hashedrekord v0.0.1 tlog entry to the given message +// signature +function verifyHashedrekord001TLogBody(tlogEntry, content) { + // Ensure that the bundles message signature matches the tlog entry + const tlogSig = tlogEntry.spec.signature.content || ''; + if (!content.compareSignature(Buffer.from(tlogSig, 'base64'))) { + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: 'signature mismatch', + }); + } + // Ensure that the bundle's message digest matches the tlog entry + const tlogDigest = tlogEntry.spec.data.hash?.value || ''; + if (!content.compareDigest(Buffer.from(tlogDigest, 'hex'))) { + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: 'digest mismatch', + }); + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/tlog/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/tlog/index.js new file mode 100644 index 0000000000000..da235360c594a --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/tlog/index.js @@ -0,0 +1,47 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifyTLogBody = verifyTLogBody; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const error_1 = require("../error"); +const dsse_1 = require("./dsse"); +const hashedrekord_1 = require("./hashedrekord"); +const intoto_1 = require("./intoto"); +// Verifies that the given tlog entry matches the supplied signature content. 
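+// Supported kinds are 'dsse', 'intoto' and 'hashedrekord'; the kind/version
+// recorded in the entry's kindVersion must match its canonicalized body.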
+function verifyTLogBody(entry, sigContent) { + const { kind, version } = entry.kindVersion; + const body = JSON.parse(entry.canonicalizedBody.toString('utf8')); + if (kind !== body.kind || version !== body.apiVersion) { + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: `kind/version mismatch - expected: ${kind}/${version}, received: ${body.kind}/${body.apiVersion}`, + }); + } + switch (body.kind) { + case 'dsse': + return (0, dsse_1.verifyDSSETLogBody)(body, sigContent); + case 'intoto': + return (0, intoto_1.verifyIntotoTLogBody)(body, sigContent); + case 'hashedrekord': + return (0, hashedrekord_1.verifyHashedRekordTLogBody)(body, sigContent); + /* istanbul ignore next */ + default: + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: `unsupported kind: ${kind}`, + }); + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/tlog/intoto.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/tlog/intoto.js new file mode 100644 index 0000000000000..9096ae9418cc3 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/tlog/intoto.js @@ -0,0 +1,62 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifyIntotoTLogBody = verifyIntotoTLogBody; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const error_1 = require("../error"); +// Compare the given intoto tlog entry to the given bundle +function verifyIntotoTLogBody(tlogEntry, content) { + switch (tlogEntry.apiVersion) { + case '0.0.2': + return verifyIntoto002TLogBody(tlogEntry, content); + default: + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: `unsupported intoto version: ${tlogEntry.apiVersion}`, + }); + } +} +// Compare the given intoto v0.0.2 tlog entry to the given DSSE envelope. 
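+// Note: the signature in an intoto v0.0.2 entry is base64-encoded twice; one
+// layer is removed by base64Decode below and the second by the Buffer.from
+// call passed to compareSignature.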
+function verifyIntoto002TLogBody(tlogEntry, content) { + // Ensure the bundle's DSSE contains a single signature + if (tlogEntry.spec.content.envelope.signatures?.length !== 1) { + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: 'signature count mismatch', + }); + } + // Signature is double-base64-encoded in the tlog entry + const tlogSig = base64Decode(tlogEntry.spec.content.envelope.signatures[0].sig); + // Ensure that the signature in the bundle's DSSE matches tlog entry + if (!content.compareSignature(Buffer.from(tlogSig, 'base64'))) { + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: 'tlog entry signature mismatch', + }); + } + // Ensure the digest of the bundle's DSSE payload matches the digest in the + // tlog entry + const tlogHash = tlogEntry.spec.content.payloadHash?.value || ''; + if (!content.compareDigest(Buffer.from(tlogHash, 'hex'))) { + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: 'DSSE payload hash mismatch', + }); + } +} +function base64Decode(str) { + return Buffer.from(str, 'base64').toString('utf-8'); +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/trust/filter.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/trust/filter.js new file mode 100644 index 0000000000000..880a16cf1940e --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/trust/filter.js @@ -0,0 +1,23 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.filterCertAuthorities = filterCertAuthorities; +exports.filterTLogAuthorities = filterTLogAuthorities; +function filterCertAuthorities(certAuthorities, criteria) { + return certAuthorities.filter((ca) => { + return (ca.validFor.start <= criteria.start && ca.validFor.end >= criteria.end); + }); +} +// Filter the list of tlog instances to only those which match the given log +// ID and have public keys which are valid for the given integrated time. +function filterTLogAuthorities(tlogAuthorities, criteria) { + return tlogAuthorities.filter((tlog) => { + // If we're filtering by log ID and the log IDs don't match, we can't use + // this tlog + if (criteria.logID && !tlog.logID.equals(criteria.logID)) { + return false; + } + // Check that the integrated time is within the validFor range + return (tlog.validFor.start <= criteria.targetDate && + criteria.targetDate <= tlog.validFor.end); + }); +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/trust/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/trust/index.js new file mode 100644 index 0000000000000..bfab2eb4f9975 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/trust/index.js @@ -0,0 +1,86 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.filterTLogAuthorities = exports.filterCertAuthorities = void 0; +exports.toTrustMaterial = toTrustMaterial; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const core_1 = require("@sigstore/core"); +const protobuf_specs_1 = require("@sigstore/protobuf-specs"); +const error_1 = require("../error"); +const BEGINNING_OF_TIME = new Date(0); +const END_OF_TIME = new Date(8640000000000000); +var filter_1 = require("./filter"); +Object.defineProperty(exports, "filterCertAuthorities", { enumerable: true, get: function () { return filter_1.filterCertAuthorities; } }); +Object.defineProperty(exports, "filterTLogAuthorities", { enumerable: true, get: function () { return filter_1.filterTLogAuthorities; } }); +function toTrustMaterial(root, keys) { + const keyFinder = typeof keys === 'function' ? keys : keyLocator(keys); + return { + certificateAuthorities: root.certificateAuthorities.map(createCertAuthority), + timestampAuthorities: root.timestampAuthorities.map(createCertAuthority), + tlogs: root.tlogs.map(createTLogAuthority), + ctlogs: root.ctlogs.map(createTLogAuthority), + publicKey: keyFinder, + }; +} +function createTLogAuthority(tlogInstance) { + const keyDetails = tlogInstance.publicKey.keyDetails; + const keyType = keyDetails === protobuf_specs_1.PublicKeyDetails.PKCS1_RSA_PKCS1V5 || + keyDetails === protobuf_specs_1.PublicKeyDetails.PKIX_RSA_PKCS1V5 || + keyDetails === protobuf_specs_1.PublicKeyDetails.PKIX_RSA_PKCS1V15_2048_SHA256 || + keyDetails === protobuf_specs_1.PublicKeyDetails.PKIX_RSA_PKCS1V15_3072_SHA256 || + keyDetails === protobuf_specs_1.PublicKeyDetails.PKIX_RSA_PKCS1V15_4096_SHA256 + ? 
'pkcs1' + : 'spki'; + return { + logID: tlogInstance.logId.keyId, + publicKey: core_1.crypto.createPublicKey(tlogInstance.publicKey.rawBytes, keyType), + validFor: { + start: tlogInstance.publicKey.validFor?.start || BEGINNING_OF_TIME, + end: tlogInstance.publicKey.validFor?.end || END_OF_TIME, + }, + }; +} +function createCertAuthority(ca) { + /* istanbul ignore next */ + return { + certChain: ca.certChain.certificates.map((cert) => { + return core_1.X509Certificate.parse(cert.rawBytes); + }), + validFor: { + start: ca.validFor?.start || BEGINNING_OF_TIME, + end: ca.validFor?.end || END_OF_TIME, + }, + }; +} +function keyLocator(keys) { + return (hint) => { + const key = (keys || {})[hint]; + if (!key) { + throw new error_1.VerificationError({ + code: 'PUBLIC_KEY_ERROR', + message: `key not found: ${hint}`, + }); + } + return { + publicKey: core_1.crypto.createPublicKey(key.rawBytes), + validFor: (date) => { + /* istanbul ignore next */ + return ((key.validFor?.start || BEGINNING_OF_TIME) <= date && + (key.validFor?.end || END_OF_TIME) >= date); + }, + }; + }; +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/trust/trust.types.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/trust/trust.types.js new file mode 100644 index 0000000000000..c8ad2e549bdc6 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/trust/trust.types.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/verifier.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/verifier.js new file mode 100644 index 0000000000000..829727cd1d40a --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/verifier.js @@ -0,0 +1,141 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Verifier = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const util_1 = require("util"); +const error_1 = require("./error"); +const key_1 = require("./key"); +const policy_1 = require("./policy"); +const timestamp_1 = require("./timestamp"); +const tlog_1 = require("./tlog"); +class Verifier { + constructor(trustMaterial, options = {}) { + this.trustMaterial = trustMaterial; + this.options = { + ctlogThreshold: options.ctlogThreshold ?? 1, + tlogThreshold: options.tlogThreshold ?? 1, + tsaThreshold: options.tsaThreshold ?? 
0, + }; + } + verify(entity, policy) { + const timestamps = this.verifyTimestamps(entity); + const signer = this.verifySigningKey(entity, timestamps); + this.verifyTLogs(entity); + this.verifySignature(entity, signer); + if (policy) { + this.verifyPolicy(policy, signer.identity || {}); + } + return signer; + } + // Checks that all of the timestamps in the entity are valid and returns them + verifyTimestamps(entity) { + let tlogCount = 0; + let tsaCount = 0; + const timestamps = entity.timestamps.map((timestamp) => { + switch (timestamp.$case) { + case 'timestamp-authority': + tsaCount++; + return (0, timestamp_1.verifyTSATimestamp)(timestamp.timestamp, entity.signature.signature, this.trustMaterial.timestampAuthorities); + case 'transparency-log': + tlogCount++; + return (0, timestamp_1.verifyTLogTimestamp)(timestamp.tlogEntry, this.trustMaterial.tlogs); + } + }); + // Check for duplicate timestamps + if (containsDupes(timestamps)) { + throw new error_1.VerificationError({ + code: 'TIMESTAMP_ERROR', + message: 'duplicate timestamp', + }); + } + if (tlogCount < this.options.tlogThreshold) { + throw new error_1.VerificationError({ + code: 'TIMESTAMP_ERROR', + message: `expected ${this.options.tlogThreshold} tlog timestamps, got ${tlogCount}`, + }); + } + if (tsaCount < this.options.tsaThreshold) { + throw new error_1.VerificationError({ + code: 'TIMESTAMP_ERROR', + message: `expected ${this.options.tsaThreshold} tsa timestamps, got ${tsaCount}`, + }); + } + return timestamps.map((t) => t.timestamp); + } + // Checks that the signing key is valid for all of the the supplied timestamps + // and returns the signer. + verifySigningKey({ key }, timestamps) { + switch (key.$case) { + case 'public-key': { + return (0, key_1.verifyPublicKey)(key.hint, timestamps, this.trustMaterial); + } + case 'certificate': { + const result = (0, key_1.verifyCertificate)(key.certificate, timestamps, this.trustMaterial); + /* istanbul ignore next - no fixture */ + if (containsDupes(result.scts)) { + throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: 'duplicate SCT', + }); + } + if (result.scts.length < this.options.ctlogThreshold) { + throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: `expected ${this.options.ctlogThreshold} SCTs, got ${result.scts.length}`, + }); + } + return result.signer; + } + } + } + // Checks that the tlog entries are valid for the supplied content + verifyTLogs({ signature: content, tlogEntries }) { + tlogEntries.forEach((entry) => (0, tlog_1.verifyTLogBody)(entry, content)); + } + // Checks that the signature is valid for the supplied content + verifySignature(entity, signer) { + if (!entity.signature.verifySignature(signer.key)) { + throw new error_1.VerificationError({ + code: 'SIGNATURE_ERROR', + message: 'signature verification failed', + }); + } + } + verifyPolicy(policy, identity) { + // Check the subject alternative name of the signer matches the policy + if (policy.subjectAlternativeName) { + (0, policy_1.verifySubjectAlternativeName)(policy.subjectAlternativeName, identity.subjectAlternativeName); + } + // Check that the extensions of the signer match the policy + if (policy.extensions) { + (0, policy_1.verifyExtensions)(policy.extensions, identity.extensions); + } + } +} +exports.Verifier = Verifier; +// Checks for duplicate items in the array. Objects are compared using +// deep equality. 
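+// e.g. containsDupes([{ a: 1 }, { a: 1 }]) returns true even though the two
+// objects are distinct references, because comparison uses isDeepStrictEqual.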
+function containsDupes(arr) { + for (let i = 0; i < arr.length; i++) { + for (let j = i + 1; j < arr.length; j++) { + if ((0, util_1.isDeepStrictEqual)(arr[i], arr[j])) { + return true; + } + } + } + return false; +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/package.json new file mode 100644 index 0000000000000..edf72b8bfd968 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/package.json @@ -0,0 +1,36 @@ +{ + "name": "@sigstore/verify", + "version": "2.0.0", + "description": "Verification of Sigstore signatures", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "clean": "shx rm -rf dist *.tsbuildinfo", + "build": "tsc --build", + "test": "jest" + }, + "files": [ + "dist" + ], + "author": "bdehamer@github.com", + "license": "Apache-2.0", + "repository": { + "type": "git", + "url": "git+https://github.com/sigstore/sigstore-js.git" + }, + "bugs": { + "url": "https://github.com/sigstore/sigstore-js/issues" + }, + "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/verify#readme", + "publishConfig": { + "provenance": true + }, + "dependencies": { + "@sigstore/protobuf-specs": "^0.3.2", + "@sigstore/bundle": "^3.0.0", + "@sigstore/core": "^2.0.0" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/LICENSE new file mode 100644 index 0000000000000..420700f5d3765 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 GitHub and the TUF Contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/base.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/base.js new file mode 100644 index 0000000000000..85e45d8fc1151 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/base.js @@ -0,0 +1,92 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Signed = exports.MetadataKind = void 0; +exports.isMetadataKind = isMetadataKind; +const util_1 = __importDefault(require("util")); +const error_1 = require("./error"); +const utils_1 = require("./utils"); +const SPECIFICATION_VERSION = ['1', '0', '31']; +var MetadataKind; +(function (MetadataKind) { + MetadataKind["Root"] = "root"; + MetadataKind["Timestamp"] = "timestamp"; + MetadataKind["Snapshot"] = "snapshot"; + MetadataKind["Targets"] = "targets"; +})(MetadataKind || (exports.MetadataKind = MetadataKind = {})); +function isMetadataKind(value) { + return (typeof value === 'string' && + Object.values(MetadataKind).includes(value)); +} +/*** + * A base class for the signed part of TUF metadata. + * + * Objects with base class Signed are usually included in a ``Metadata`` object + * on the signed attribute. This class provides attributes and methods that + * are common for all TUF metadata types (roles). + */ +class Signed { + constructor(options) { + this.specVersion = options.specVersion || SPECIFICATION_VERSION.join('.'); + const specList = this.specVersion.split('.'); + if (!(specList.length === 2 || specList.length === 3) || + !specList.every((item) => isNumeric(item))) { + throw new error_1.ValueError('Failed to parse specVersion'); + } + // major version must match + if (specList[0] != SPECIFICATION_VERSION[0]) { + throw new error_1.ValueError('Unsupported specVersion'); + } + this.expires = options.expires; + this.version = options.version; + this.unrecognizedFields = options.unrecognizedFields || {}; + } + equals(other) { + if (!(other instanceof Signed)) { + return false; + } + return (this.specVersion === other.specVersion && + this.expires === other.expires && + this.version === other.version && + util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); + } + isExpired(referenceTime) { + if (!referenceTime) { + referenceTime = new Date(); + } + return referenceTime >= new Date(this.expires); + } + static commonFieldsFromJSON(data) { + const { spec_version, expires, version, ...rest } = data; + if (!utils_1.guard.isDefined(spec_version)) { + throw new error_1.ValueError('spec_version is not defined'); + } + else if (typeof spec_version !== 'string') { + throw new TypeError('spec_version must be a string'); + } + if (!utils_1.guard.isDefined(expires)) { + throw new error_1.ValueError('expires is not defined'); + } + else if (!(typeof expires === 'string')) { + throw new TypeError('expires must be a string'); + } + if (!utils_1.guard.isDefined(version)) { + throw new error_1.ValueError('version is not defined'); + } + else if (!(typeof version === 'number')) { + throw new TypeError('version must be a number'); + } + return { + specVersion: spec_version, + expires, + version, + unrecognizedFields: rest, + }; + } +} +exports.Signed = Signed; +function isNumeric(str) { + return !isNaN(Number(str)); +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/delegations.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/delegations.js new file mode 100644 index 0000000000000..7165f1e244393 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/delegations.js @@ -0,0 +1,115 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Delegations = void 0; +const util_1 = __importDefault(require("util")); +const error_1 = require("./error"); +const key_1 = require("./key"); +const role_1 = require("./role"); +const utils_1 = require("./utils"); +/** + * A container object storing information about all delegations. + * + * Targets roles that are trusted to provide signed metadata files + * describing targets with designated pathnames and/or further delegations. + */ +class Delegations { + constructor(options) { + this.keys = options.keys; + this.unrecognizedFields = options.unrecognizedFields || {}; + if (options.roles) { + if (Object.keys(options.roles).some((roleName) => role_1.TOP_LEVEL_ROLE_NAMES.includes(roleName))) { + throw new error_1.ValueError('Delegated role name conflicts with top-level role name'); + } + } + this.succinctRoles = options.succinctRoles; + this.roles = options.roles; + } + equals(other) { + if (!(other instanceof Delegations)) { + return false; + } + return (util_1.default.isDeepStrictEqual(this.keys, other.keys) && + util_1.default.isDeepStrictEqual(this.roles, other.roles) && + util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields) && + util_1.default.isDeepStrictEqual(this.succinctRoles, other.succinctRoles)); + } + *rolesForTarget(targetPath) { + if (this.roles) { + for (const role of Object.values(this.roles)) { + if (role.isDelegatedPath(targetPath)) { + yield { role: role.name, terminating: role.terminating }; + } + } + } + else if (this.succinctRoles) { + yield { + role: this.succinctRoles.getRoleForTarget(targetPath), + terminating: true, + }; + } + } + toJSON() { + const json = { + keys: keysToJSON(this.keys), + ...this.unrecognizedFields, + }; + if (this.roles) { + json.roles = rolesToJSON(this.roles); + } + else if (this.succinctRoles) { + json.succinct_roles = this.succinctRoles.toJSON(); + } + return json; + } + static fromJSON(data) { + const { keys, roles, succinct_roles, ...unrecognizedFields } = data; + let succinctRoles; + if (utils_1.guard.isObject(succinct_roles)) { + succinctRoles = role_1.SuccinctRoles.fromJSON(succinct_roles); + } + return new Delegations({ + keys: keysFromJSON(keys), + roles: rolesFromJSON(roles), + unrecognizedFields, + succinctRoles, + }); + } +} +exports.Delegations = Delegations; +function keysToJSON(keys) { + return Object.entries(keys).reduce((acc, [keyId, key]) => ({ + ...acc, + [keyId]: key.toJSON(), + }), {}); +} +function rolesToJSON(roles) { + return Object.values(roles).map((role) => role.toJSON()); +} +function keysFromJSON(data) { + if (!utils_1.guard.isObjectRecord(data)) { + throw new TypeError('keys is malformed'); + } + return Object.entries(data).reduce((acc, [keyID, keyData]) => ({ + ...acc, + [keyID]: key_1.Key.fromJSON(keyID, keyData), + }), {}); +} +function rolesFromJSON(data) { + let roleMap; + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObjectArray(data)) { + throw new TypeError('roles is malformed'); + } + roleMap = data.reduce((acc, role) => { + const delegatedRole = role_1.DelegatedRole.fromJSON(role); + return { + ...acc, + [delegatedRole.name]: delegatedRole, + }; + }, {}); + } + return roleMap; +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/error.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/error.js new file mode 100644 index 0000000000000..ba80698747ba0 --- /dev/null +++ 
b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/error.js @@ -0,0 +1,27 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.UnsupportedAlgorithmError = exports.CryptoError = exports.LengthOrHashMismatchError = exports.UnsignedMetadataError = exports.RepositoryError = exports.ValueError = void 0; +// An error about insufficient values +class ValueError extends Error { +} +exports.ValueError = ValueError; +// An error with a repository's state, such as a missing file. +// It covers all exceptions that come from the repository side when +// looking from the perspective of users of metadata API or ngclient. +class RepositoryError extends Error { +} +exports.RepositoryError = RepositoryError; +// An error about metadata object with insufficient threshold of signatures. +class UnsignedMetadataError extends RepositoryError { +} +exports.UnsignedMetadataError = UnsignedMetadataError; +// An error while checking the length and hash values of an object. +class LengthOrHashMismatchError extends RepositoryError { +} +exports.LengthOrHashMismatchError = LengthOrHashMismatchError; +class CryptoError extends Error { +} +exports.CryptoError = CryptoError; +class UnsupportedAlgorithmError extends CryptoError { +} +exports.UnsupportedAlgorithmError = UnsupportedAlgorithmError; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/file.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/file.js new file mode 100644 index 0000000000000..b35fe5950bbb7 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/file.js @@ -0,0 +1,183 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TargetFile = exports.MetaFile = void 0; +const crypto_1 = __importDefault(require("crypto")); +const util_1 = __importDefault(require("util")); +const error_1 = require("./error"); +const utils_1 = require("./utils"); +// A container with information about a particular metadata file. +// +// This class is used for Timestamp and Snapshot metadata. +class MetaFile { + constructor(opts) { + if (opts.version <= 0) { + throw new error_1.ValueError('Metafile version must be at least 1'); + } + if (opts.length !== undefined) { + validateLength(opts.length); + } + this.version = opts.version; + this.length = opts.length; + this.hashes = opts.hashes; + this.unrecognizedFields = opts.unrecognizedFields || {}; + } + equals(other) { + if (!(other instanceof MetaFile)) { + return false; + } + return (this.version === other.version && + this.length === other.length && + util_1.default.isDeepStrictEqual(this.hashes, other.hashes) && + util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); + } + verify(data) { + // Verifies that the given data matches the expected length. + if (this.length !== undefined) { + if (data.length !== this.length) { + throw new error_1.LengthOrHashMismatchError(`Expected length ${this.length} but got ${data.length}`); + } + } + // Verifies that the given data matches the supplied hashes. 
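        // Illustrative sketch only (not from the upstream package): the same hash
        // check expressed standalone with Node's crypto module. The algorithm name
        // and expected digest below are hypothetical placeholders.
        //
        //   const crypto = require('crypto')
        //   const hashes = { sha256: '<expected hex digest>' }   // from the MetaFile
        //   const data = Buffer.from('{"_type":"snapshot"}')     // raw metadata bytes
        //   for (const [alg, expected] of Object.entries(hashes)) {
        //     const observed = crypto.createHash(alg).update(data).digest('hex')
        //     if (observed !== expected) {
        //       throw new Error(`Expected hash ${expected} but got ${observed}`)
        //     }
        //   }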
+ if (this.hashes) { + Object.entries(this.hashes).forEach(([key, value]) => { + let hash; + try { + hash = crypto_1.default.createHash(key); + } + catch (e) { + throw new error_1.LengthOrHashMismatchError(`Hash algorithm ${key} not supported`); + } + const observedHash = hash.update(data).digest('hex'); + if (observedHash !== value) { + throw new error_1.LengthOrHashMismatchError(`Expected hash ${value} but got ${observedHash}`); + } + }); + } + } + toJSON() { + const json = { + version: this.version, + ...this.unrecognizedFields, + }; + if (this.length !== undefined) { + json.length = this.length; + } + if (this.hashes) { + json.hashes = this.hashes; + } + return json; + } + static fromJSON(data) { + const { version, length, hashes, ...rest } = data; + if (typeof version !== 'number') { + throw new TypeError('version must be a number'); + } + if (utils_1.guard.isDefined(length) && typeof length !== 'number') { + throw new TypeError('length must be a number'); + } + if (utils_1.guard.isDefined(hashes) && !utils_1.guard.isStringRecord(hashes)) { + throw new TypeError('hashes must be string keys and values'); + } + return new MetaFile({ + version, + length, + hashes, + unrecognizedFields: rest, + }); + } +} +exports.MetaFile = MetaFile; +// Container for info about a particular target file. +// +// This class is used for Target metadata. +class TargetFile { + constructor(opts) { + validateLength(opts.length); + this.length = opts.length; + this.path = opts.path; + this.hashes = opts.hashes; + this.unrecognizedFields = opts.unrecognizedFields || {}; + } + get custom() { + const custom = this.unrecognizedFields['custom']; + if (!custom || Array.isArray(custom) || !(typeof custom === 'object')) { + return {}; + } + return custom; + } + equals(other) { + if (!(other instanceof TargetFile)) { + return false; + } + return (this.length === other.length && + this.path === other.path && + util_1.default.isDeepStrictEqual(this.hashes, other.hashes) && + util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); + } + async verify(stream) { + let observedLength = 0; + // Create a digest for each hash algorithm + const digests = Object.keys(this.hashes).reduce((acc, key) => { + try { + acc[key] = crypto_1.default.createHash(key); + } + catch (e) { + throw new error_1.LengthOrHashMismatchError(`Hash algorithm ${key} not supported`); + } + return acc; + }, {}); + // Read stream chunk by chunk + for await (const chunk of stream) { + // Keep running tally of stream length + observedLength += chunk.length; + // Append chunk to each digest + Object.values(digests).forEach((digest) => { + digest.update(chunk); + }); + } + // Verify length matches expected value + if (observedLength !== this.length) { + throw new error_1.LengthOrHashMismatchError(`Expected length ${this.length} but got ${observedLength}`); + } + // Verify each digest matches expected value + Object.entries(digests).forEach(([key, value]) => { + const expected = this.hashes[key]; + const actual = value.digest('hex'); + if (actual !== expected) { + throw new error_1.LengthOrHashMismatchError(`Expected hash ${expected} but got ${actual}`); + } + }); + } + toJSON() { + return { + length: this.length, + hashes: this.hashes, + ...this.unrecognizedFields, + }; + } + static fromJSON(path, data) { + const { length, hashes, ...rest } = data; + if (typeof length !== 'number') { + throw new TypeError('length must be a number'); + } + if (!utils_1.guard.isStringRecord(hashes)) { + throw new TypeError('hashes must have string keys 
and values'); + } + return new TargetFile({ + length, + path, + hashes, + unrecognizedFields: rest, + }); + } +} +exports.TargetFile = TargetFile; +// Check that supplied length if valid +function validateLength(length) { + if (length < 0) { + throw new error_1.ValueError('Length must be at least 0'); + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/index.js new file mode 100644 index 0000000000000..a4dc783659f04 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/index.js @@ -0,0 +1,24 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Timestamp = exports.Targets = exports.Snapshot = exports.Signature = exports.Root = exports.Metadata = exports.Key = exports.TargetFile = exports.MetaFile = exports.ValueError = exports.MetadataKind = void 0; +var base_1 = require("./base"); +Object.defineProperty(exports, "MetadataKind", { enumerable: true, get: function () { return base_1.MetadataKind; } }); +var error_1 = require("./error"); +Object.defineProperty(exports, "ValueError", { enumerable: true, get: function () { return error_1.ValueError; } }); +var file_1 = require("./file"); +Object.defineProperty(exports, "MetaFile", { enumerable: true, get: function () { return file_1.MetaFile; } }); +Object.defineProperty(exports, "TargetFile", { enumerable: true, get: function () { return file_1.TargetFile; } }); +var key_1 = require("./key"); +Object.defineProperty(exports, "Key", { enumerable: true, get: function () { return key_1.Key; } }); +var metadata_1 = require("./metadata"); +Object.defineProperty(exports, "Metadata", { enumerable: true, get: function () { return metadata_1.Metadata; } }); +var root_1 = require("./root"); +Object.defineProperty(exports, "Root", { enumerable: true, get: function () { return root_1.Root; } }); +var signature_1 = require("./signature"); +Object.defineProperty(exports, "Signature", { enumerable: true, get: function () { return signature_1.Signature; } }); +var snapshot_1 = require("./snapshot"); +Object.defineProperty(exports, "Snapshot", { enumerable: true, get: function () { return snapshot_1.Snapshot; } }); +var targets_1 = require("./targets"); +Object.defineProperty(exports, "Targets", { enumerable: true, get: function () { return targets_1.Targets; } }); +var timestamp_1 = require("./timestamp"); +Object.defineProperty(exports, "Timestamp", { enumerable: true, get: function () { return timestamp_1.Timestamp; } }); diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/key.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/key.js new file mode 100644 index 0000000000000..5e55b09d7c6dd --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/key.js @@ -0,0 +1,85 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Key = void 0; +const util_1 = __importDefault(require("util")); +const error_1 = require("./error"); +const utils_1 = require("./utils"); +const key_1 = require("./utils/key"); +// A container class representing the public portion of a Key. 
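// Illustrative usage sketch of the class below (not from the upstream package);
// the key ID and key material are hypothetical placeholders.
//
//   const key = Key.fromJSON('0123abcd', {
//     keytype: 'ed25519',
//     scheme: 'ed25519',
//     keyval: { public: '<hex-encoded public key>' },
//   })
//   // Throws UnsignedMetadataError when metadata.signatures['0123abcd'] is
//   // missing or fails to verify against metadata.signed.
//   key.verifySignature(metadata)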
+class Key { + constructor(options) { + const { keyID, keyType, scheme, keyVal, unrecognizedFields } = options; + this.keyID = keyID; + this.keyType = keyType; + this.scheme = scheme; + this.keyVal = keyVal; + this.unrecognizedFields = unrecognizedFields || {}; + } + // Verifies the that the metadata.signatures contains a signature made with + // this key and is correctly signed. + verifySignature(metadata) { + const signature = metadata.signatures[this.keyID]; + if (!signature) + throw new error_1.UnsignedMetadataError('no signature for key found in metadata'); + if (!this.keyVal.public) + throw new error_1.UnsignedMetadataError('no public key found'); + const publicKey = (0, key_1.getPublicKey)({ + keyType: this.keyType, + scheme: this.scheme, + keyVal: this.keyVal.public, + }); + const signedData = metadata.signed.toJSON(); + try { + if (!utils_1.crypto.verifySignature(signedData, publicKey, signature.sig)) { + throw new error_1.UnsignedMetadataError(`failed to verify ${this.keyID} signature`); + } + } + catch (error) { + if (error instanceof error_1.UnsignedMetadataError) { + throw error; + } + throw new error_1.UnsignedMetadataError(`failed to verify ${this.keyID} signature`); + } + } + equals(other) { + if (!(other instanceof Key)) { + return false; + } + return (this.keyID === other.keyID && + this.keyType === other.keyType && + this.scheme === other.scheme && + util_1.default.isDeepStrictEqual(this.keyVal, other.keyVal) && + util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); + } + toJSON() { + return { + keytype: this.keyType, + scheme: this.scheme, + keyval: this.keyVal, + ...this.unrecognizedFields, + }; + } + static fromJSON(keyID, data) { + const { keytype, scheme, keyval, ...rest } = data; + if (typeof keytype !== 'string') { + throw new TypeError('keytype must be a string'); + } + if (typeof scheme !== 'string') { + throw new TypeError('scheme must be a string'); + } + if (!utils_1.guard.isStringRecord(keyval)) { + throw new TypeError('keyval must be a string record'); + } + return new Key({ + keyID, + keyType: keytype, + scheme, + keyVal: keyval, + unrecognizedFields: rest, + }); + } +} +exports.Key = Key; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/metadata.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/metadata.js new file mode 100644 index 0000000000000..389d2504e0b53 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/metadata.js @@ -0,0 +1,160 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Metadata = void 0; +const canonical_json_1 = require("@tufjs/canonical-json"); +const util_1 = __importDefault(require("util")); +const base_1 = require("./base"); +const error_1 = require("./error"); +const root_1 = require("./root"); +const signature_1 = require("./signature"); +const snapshot_1 = require("./snapshot"); +const targets_1 = require("./targets"); +const timestamp_1 = require("./timestamp"); +const utils_1 = require("./utils"); +/*** + * A container for signed TUF metadata. + * + * Provides methods to convert to and from json, read and write to and + * from JSON and to create and verify metadata signatures. 
+ * + * ``Metadata[T]`` is a generic container type where T can be any one type of + * [``Root``, ``Timestamp``, ``Snapshot``, ``Targets``]. The purpose of this + * is to allow static type checking of the signed attribute in code using + * Metadata:: + * + * root_md = Metadata[Root].fromJSON("root.json") + * # root_md type is now Metadata[Root]. This means signed and its + * # attributes like consistent_snapshot are now statically typed and the + * # types can be verified by static type checkers and shown by IDEs + * + * Using a type constraint is not required but not doing so means T is not a + * specific type so static typing cannot happen. Note that the type constraint + * ``[Root]`` is not validated at runtime (as pure annotations are not available + * then). + * + * Apart from ``expires`` all of the arguments to the inner constructors have + * reasonable default values for new metadata. + */ +class Metadata { + constructor(signed, signatures, unrecognizedFields) { + this.signed = signed; + this.signatures = signatures || {}; + this.unrecognizedFields = unrecognizedFields || {}; + } + sign(signer, append = true) { + const bytes = Buffer.from((0, canonical_json_1.canonicalize)(this.signed.toJSON())); + const signature = signer(bytes); + if (!append) { + this.signatures = {}; + } + this.signatures[signature.keyID] = signature; + } + verifyDelegate(delegatedRole, delegatedMetadata) { + let role; + let keys = {}; + switch (this.signed.type) { + case base_1.MetadataKind.Root: + keys = this.signed.keys; + role = this.signed.roles[delegatedRole]; + break; + case base_1.MetadataKind.Targets: + if (!this.signed.delegations) { + throw new error_1.ValueError(`No delegations found for ${delegatedRole}`); + } + keys = this.signed.delegations.keys; + if (this.signed.delegations.roles) { + role = this.signed.delegations.roles[delegatedRole]; + } + else if (this.signed.delegations.succinctRoles) { + if (this.signed.delegations.succinctRoles.isDelegatedRole(delegatedRole)) { + role = this.signed.delegations.succinctRoles; + } + } + break; + default: + throw new TypeError('invalid metadata type'); + } + if (!role) { + throw new error_1.ValueError(`no delegation found for ${delegatedRole}`); + } + const signingKeys = new Set(); + role.keyIDs.forEach((keyID) => { + const key = keys[keyID]; + // If we dont' have the key, continue checking other keys + if (!key) { + return; + } + try { + key.verifySignature(delegatedMetadata); + signingKeys.add(key.keyID); + } + catch (error) { + // continue + } + }); + if (signingKeys.size < role.threshold) { + throw new error_1.UnsignedMetadataError(`${delegatedRole} was signed by ${signingKeys.size}/${role.threshold} keys`); + } + } + equals(other) { + if (!(other instanceof Metadata)) { + return false; + } + return (this.signed.equals(other.signed) && + util_1.default.isDeepStrictEqual(this.signatures, other.signatures) && + util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); + } + toJSON() { + const signatures = Object.values(this.signatures).map((signature) => { + return signature.toJSON(); + }); + return { + signatures, + signed: this.signed.toJSON(), + ...this.unrecognizedFields, + }; + } + static fromJSON(type, data) { + const { signed, signatures, ...rest } = data; + if (!utils_1.guard.isDefined(signed) || !utils_1.guard.isObject(signed)) { + throw new TypeError('signed is not defined'); + } + if (type !== signed._type) { + throw new error_1.ValueError(`expected '${type}', got ${signed['_type']}`); + } + if 
(!utils_1.guard.isObjectArray(signatures)) { + throw new TypeError('signatures is not an array'); + } + let signedObj; + switch (type) { + case base_1.MetadataKind.Root: + signedObj = root_1.Root.fromJSON(signed); + break; + case base_1.MetadataKind.Timestamp: + signedObj = timestamp_1.Timestamp.fromJSON(signed); + break; + case base_1.MetadataKind.Snapshot: + signedObj = snapshot_1.Snapshot.fromJSON(signed); + break; + case base_1.MetadataKind.Targets: + signedObj = targets_1.Targets.fromJSON(signed); + break; + default: + throw new TypeError('invalid metadata type'); + } + const sigMap = {}; + // Ensure that each signature is unique + signatures.forEach((sigData) => { + const sig = signature_1.Signature.fromJSON(sigData); + if (sigMap[sig.keyID]) { + throw new error_1.ValueError(`multiple signatures found for keyid: ${sig.keyID}`); + } + sigMap[sig.keyID] = sig; + }); + return new Metadata(signedObj, sigMap, rest); + } +} +exports.Metadata = Metadata; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/role.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/role.js new file mode 100644 index 0000000000000..f7ddbc6fe3f38 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/role.js @@ -0,0 +1,299 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SuccinctRoles = exports.DelegatedRole = exports.Role = exports.TOP_LEVEL_ROLE_NAMES = void 0; +const crypto_1 = __importDefault(require("crypto")); +const minimatch_1 = require("minimatch"); +const util_1 = __importDefault(require("util")); +const error_1 = require("./error"); +const utils_1 = require("./utils"); +exports.TOP_LEVEL_ROLE_NAMES = [ + 'root', + 'targets', + 'snapshot', + 'timestamp', +]; +/** + * Container that defines which keys are required to sign roles metadata. + * + * Role defines how many keys are required to successfully sign the roles + * metadata, and which keys are accepted. + */ +class Role { + constructor(options) { + const { keyIDs, threshold, unrecognizedFields } = options; + if (hasDuplicates(keyIDs)) { + throw new error_1.ValueError('duplicate key IDs found'); + } + if (threshold < 1) { + throw new error_1.ValueError('threshold must be at least 1'); + } + this.keyIDs = keyIDs; + this.threshold = threshold; + this.unrecognizedFields = unrecognizedFields || {}; + } + equals(other) { + if (!(other instanceof Role)) { + return false; + } + return (this.threshold === other.threshold && + util_1.default.isDeepStrictEqual(this.keyIDs, other.keyIDs) && + util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); + } + toJSON() { + return { + keyids: this.keyIDs, + threshold: this.threshold, + ...this.unrecognizedFields, + }; + } + static fromJSON(data) { + const { keyids, threshold, ...rest } = data; + if (!utils_1.guard.isStringArray(keyids)) { + throw new TypeError('keyids must be an array'); + } + if (typeof threshold !== 'number') { + throw new TypeError('threshold must be a number'); + } + return new Role({ + keyIDs: keyids, + threshold, + unrecognizedFields: rest, + }); + } +} +exports.Role = Role; +function hasDuplicates(array) { + return new Set(array).size !== array.length; +} +/** + * A container with information about a delegated role. 
+ * + * A delegation can happen in two ways: + * - ``paths`` is set: delegates targets matching any path pattern in ``paths`` + * - ``pathHashPrefixes`` is set: delegates targets whose target path hash + * starts with any of the prefixes in ``pathHashPrefixes`` + * + * ``paths`` and ``pathHashPrefixes`` are mutually exclusive: both cannot be + * set, at least one of them must be set. + */ +class DelegatedRole extends Role { + constructor(opts) { + super(opts); + const { name, terminating, paths, pathHashPrefixes } = opts; + this.name = name; + this.terminating = terminating; + if (opts.paths && opts.pathHashPrefixes) { + throw new error_1.ValueError('paths and pathHashPrefixes are mutually exclusive'); + } + this.paths = paths; + this.pathHashPrefixes = pathHashPrefixes; + } + equals(other) { + if (!(other instanceof DelegatedRole)) { + return false; + } + return (super.equals(other) && + this.name === other.name && + this.terminating === other.terminating && + util_1.default.isDeepStrictEqual(this.paths, other.paths) && + util_1.default.isDeepStrictEqual(this.pathHashPrefixes, other.pathHashPrefixes)); + } + isDelegatedPath(targetFilepath) { + if (this.paths) { + return this.paths.some((pathPattern) => isTargetInPathPattern(targetFilepath, pathPattern)); + } + if (this.pathHashPrefixes) { + const hasher = crypto_1.default.createHash('sha256'); + const pathHash = hasher.update(targetFilepath).digest('hex'); + return this.pathHashPrefixes.some((pathHashPrefix) => pathHash.startsWith(pathHashPrefix)); + } + return false; + } + toJSON() { + const json = { + ...super.toJSON(), + name: this.name, + terminating: this.terminating, + }; + if (this.paths) { + json.paths = this.paths; + } + if (this.pathHashPrefixes) { + json.path_hash_prefixes = this.pathHashPrefixes; + } + return json; + } + static fromJSON(data) { + const { keyids, threshold, name, terminating, paths, path_hash_prefixes, ...rest } = data; + if (!utils_1.guard.isStringArray(keyids)) { + throw new TypeError('keyids must be an array of strings'); + } + if (typeof threshold !== 'number') { + throw new TypeError('threshold must be a number'); + } + if (typeof name !== 'string') { + throw new TypeError('name must be a string'); + } + if (typeof terminating !== 'boolean') { + throw new TypeError('terminating must be a boolean'); + } + if (utils_1.guard.isDefined(paths) && !utils_1.guard.isStringArray(paths)) { + throw new TypeError('paths must be an array of strings'); + } + if (utils_1.guard.isDefined(path_hash_prefixes) && + !utils_1.guard.isStringArray(path_hash_prefixes)) { + throw new TypeError('path_hash_prefixes must be an array of strings'); + } + return new DelegatedRole({ + keyIDs: keyids, + threshold, + name, + terminating, + paths, + pathHashPrefixes: path_hash_prefixes, + unrecognizedFields: rest, + }); + } +} +exports.DelegatedRole = DelegatedRole; +// JS version of Ruby's Array#zip +const zip = (a, b) => a.map((k, i) => [k, b[i]]); +function isTargetInPathPattern(target, pattern) { + const targetParts = target.split('/'); + const patternParts = pattern.split('/'); + if (patternParts.length != targetParts.length) { + return false; + } + return zip(targetParts, patternParts).every(([targetPart, patternPart]) => (0, minimatch_1.minimatch)(targetPart, patternPart)); +} +/** + * Succinctly defines a hash bin delegation graph. + * + * A ``SuccinctRoles`` object describes a delegation graph that covers all + * targets, distributing them uniformly over the delegated roles (i.e. bins) + * in the graph. 
+ * + * The total number of bins is 2 to the power of the passed ``bit_length``. + * + * Bin names are the concatenation of the passed ``name_prefix`` and a + * zero-padded hex representation of the bin index separated by a hyphen. + * + * The passed ``keyids`` and ``threshold`` is used for each bin, and each bin + * is 'terminating'. + * + * For details: https://github.com/theupdateframework/taps/blob/master/tap15.md + */ +class SuccinctRoles extends Role { + constructor(opts) { + super(opts); + const { bitLength, namePrefix } = opts; + if (bitLength <= 0 || bitLength > 32) { + throw new error_1.ValueError('bitLength must be between 1 and 32'); + } + this.bitLength = bitLength; + this.namePrefix = namePrefix; + // Calculate the suffix_len value based on the total number of bins in + // hex. If bit_length = 10 then number_of_bins = 1024 or bin names will + // have a suffix between "000" and "3ff" in hex and suffix_len will be 3 + // meaning the third bin will have a suffix of "003". + this.numberOfBins = Math.pow(2, bitLength); + // suffix_len is calculated based on "number_of_bins - 1" as the name + // of the last bin contains the number "number_of_bins -1" as a suffix. + this.suffixLen = (this.numberOfBins - 1).toString(16).length; + } + equals(other) { + if (!(other instanceof SuccinctRoles)) { + return false; + } + return (super.equals(other) && + this.bitLength === other.bitLength && + this.namePrefix === other.namePrefix); + } + /*** + * Calculates the name of the delegated role responsible for 'target_filepath'. + * + * The target at path ''target_filepath' is assigned to a bin by casting + * the left-most 'bit_length' of bits of the file path hash digest to + * int, using it as bin index between 0 and '2**bit_length - 1'. + * + * Args: + * target_filepath: URL path to a target file, relative to a base + * targets URL. + */ + getRoleForTarget(targetFilepath) { + const hasher = crypto_1.default.createHash('sha256'); + const hasherBuffer = hasher.update(targetFilepath).digest(); + // can't ever need more than 4 bytes (32 bits). + const hashBytes = hasherBuffer.subarray(0, 4); + // Right shift hash bytes, so that we only have the leftmost + // bit_length bits that we care about. + const shiftValue = 32 - this.bitLength; + const binNumber = hashBytes.readUInt32BE() >>> shiftValue; + // Add zero padding if necessary and cast to hex the suffix. + const suffix = binNumber.toString(16).padStart(this.suffixLen, '0'); + return `${this.namePrefix}-${suffix}`; + } + *getRoles() { + for (let i = 0; i < this.numberOfBins; i++) { + const suffix = i.toString(16).padStart(this.suffixLen, '0'); + yield `${this.namePrefix}-${suffix}`; + } + } + /*** + * Determines whether the given ``role_name`` is in one of + * the delegated roles that ``SuccinctRoles`` represents. + * + * Args: + * role_name: The name of the role to check against. 
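 *
 * For example, with ``name_prefix`` 'bins' and ``bit_length`` 8 there are 256
 * bins and two-character suffixes: 'bins-2a' is a delegated role, while
 * 'alpha-2a' (wrong prefix) and 'bins-0' or 'bins-1ff' (wrong suffix length)
 * are not.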
+ */ + isDelegatedRole(roleName) { + const desiredPrefix = this.namePrefix + '-'; + if (!roleName.startsWith(desiredPrefix)) { + return false; + } + const suffix = roleName.slice(desiredPrefix.length, roleName.length); + if (suffix.length != this.suffixLen) { + return false; + } + // make sure the suffix is a hex string + if (!suffix.match(/^[0-9a-fA-F]+$/)) { + return false; + } + const num = parseInt(suffix, 16); + return 0 <= num && num < this.numberOfBins; + } + toJSON() { + const json = { + ...super.toJSON(), + bit_length: this.bitLength, + name_prefix: this.namePrefix, + }; + return json; + } + static fromJSON(data) { + const { keyids, threshold, bit_length, name_prefix, ...rest } = data; + if (!utils_1.guard.isStringArray(keyids)) { + throw new TypeError('keyids must be an array of strings'); + } + if (typeof threshold !== 'number') { + throw new TypeError('threshold must be a number'); + } + if (typeof bit_length !== 'number') { + throw new TypeError('bit_length must be a number'); + } + if (typeof name_prefix !== 'string') { + throw new TypeError('name_prefix must be a string'); + } + return new SuccinctRoles({ + keyIDs: keyids, + threshold, + bitLength: bit_length, + namePrefix: name_prefix, + unrecognizedFields: rest, + }); + } +} +exports.SuccinctRoles = SuccinctRoles; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/root.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/root.js new file mode 100644 index 0000000000000..36d0ef0f186d1 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/root.js @@ -0,0 +1,116 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Root = void 0; +const util_1 = __importDefault(require("util")); +const base_1 = require("./base"); +const error_1 = require("./error"); +const key_1 = require("./key"); +const role_1 = require("./role"); +const utils_1 = require("./utils"); +/** + * A container for the signed part of root metadata. + * + * The top-level role and metadata file signed by the root keys. + * This role specifies trusted keys for all other top-level roles, which may further delegate trust. + */ +class Root extends base_1.Signed { + constructor(options) { + super(options); + this.type = base_1.MetadataKind.Root; + this.keys = options.keys || {}; + this.consistentSnapshot = options.consistentSnapshot ?? 
true; + if (!options.roles) { + this.roles = role_1.TOP_LEVEL_ROLE_NAMES.reduce((acc, role) => ({ + ...acc, + [role]: new role_1.Role({ keyIDs: [], threshold: 1 }), + }), {}); + } + else { + const roleNames = new Set(Object.keys(options.roles)); + if (!role_1.TOP_LEVEL_ROLE_NAMES.every((role) => roleNames.has(role))) { + throw new error_1.ValueError('missing top-level role'); + } + this.roles = options.roles; + } + } + addKey(key, role) { + if (!this.roles[role]) { + throw new error_1.ValueError(`role ${role} does not exist`); + } + if (!this.roles[role].keyIDs.includes(key.keyID)) { + this.roles[role].keyIDs.push(key.keyID); + } + this.keys[key.keyID] = key; + } + equals(other) { + if (!(other instanceof Root)) { + return false; + } + return (super.equals(other) && + this.consistentSnapshot === other.consistentSnapshot && + util_1.default.isDeepStrictEqual(this.keys, other.keys) && + util_1.default.isDeepStrictEqual(this.roles, other.roles)); + } + toJSON() { + return { + _type: this.type, + spec_version: this.specVersion, + version: this.version, + expires: this.expires, + keys: keysToJSON(this.keys), + roles: rolesToJSON(this.roles), + consistent_snapshot: this.consistentSnapshot, + ...this.unrecognizedFields, + }; + } + static fromJSON(data) { + const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data); + const { keys, roles, consistent_snapshot, ...rest } = unrecognizedFields; + if (typeof consistent_snapshot !== 'boolean') { + throw new TypeError('consistent_snapshot must be a boolean'); + } + return new Root({ + ...commonFields, + keys: keysFromJSON(keys), + roles: rolesFromJSON(roles), + consistentSnapshot: consistent_snapshot, + unrecognizedFields: rest, + }); + } +} +exports.Root = Root; +function keysToJSON(keys) { + return Object.entries(keys).reduce((acc, [keyID, key]) => ({ ...acc, [keyID]: key.toJSON() }), {}); +} +function rolesToJSON(roles) { + return Object.entries(roles).reduce((acc, [roleName, role]) => ({ ...acc, [roleName]: role.toJSON() }), {}); +} +function keysFromJSON(data) { + let keys; + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObjectRecord(data)) { + throw new TypeError('keys must be an object'); + } + keys = Object.entries(data).reduce((acc, [keyID, keyData]) => ({ + ...acc, + [keyID]: key_1.Key.fromJSON(keyID, keyData), + }), {}); + } + return keys; +} +function rolesFromJSON(data) { + let roles; + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObjectRecord(data)) { + throw new TypeError('roles must be an object'); + } + roles = Object.entries(data).reduce((acc, [roleName, roleData]) => ({ + ...acc, + [roleName]: role_1.Role.fromJSON(roleData), + }), {}); + } + return roles; +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/signature.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/signature.js new file mode 100644 index 0000000000000..33eb204eb0835 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/signature.js @@ -0,0 +1,38 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Signature = void 0; +/** + * A container class containing information about a signature. + * + * Contains a signature and the keyid uniquely identifying the key used + * to generate the signature. + * + * Provide a `fromJSON` method to create a Signature from a JSON object. 
+ */ +class Signature { + constructor(options) { + const { keyID, sig } = options; + this.keyID = keyID; + this.sig = sig; + } + toJSON() { + return { + keyid: this.keyID, + sig: this.sig, + }; + } + static fromJSON(data) { + const { keyid, sig } = data; + if (typeof keyid !== 'string') { + throw new TypeError('keyid must be a string'); + } + if (typeof sig !== 'string') { + throw new TypeError('sig must be a string'); + } + return new Signature({ + keyID: keyid, + sig: sig, + }); + } +} +exports.Signature = Signature; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/snapshot.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/snapshot.js new file mode 100644 index 0000000000000..e90ea8e729e4e --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/snapshot.js @@ -0,0 +1,71 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Snapshot = void 0; +const util_1 = __importDefault(require("util")); +const base_1 = require("./base"); +const file_1 = require("./file"); +const utils_1 = require("./utils"); +/** + * A container for the signed part of snapshot metadata. + * + * Snapshot contains information about all target Metadata files. + * A top-level role that specifies the latest versions of all targets metadata files, + * and hence the latest versions of all targets (including any dependencies between them) on the repository. + */ +class Snapshot extends base_1.Signed { + constructor(opts) { + super(opts); + this.type = base_1.MetadataKind.Snapshot; + this.meta = opts.meta || { 'targets.json': new file_1.MetaFile({ version: 1 }) }; + } + equals(other) { + if (!(other instanceof Snapshot)) { + return false; + } + return super.equals(other) && util_1.default.isDeepStrictEqual(this.meta, other.meta); + } + toJSON() { + return { + _type: this.type, + meta: metaToJSON(this.meta), + spec_version: this.specVersion, + version: this.version, + expires: this.expires, + ...this.unrecognizedFields, + }; + } + static fromJSON(data) { + const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data); + const { meta, ...rest } = unrecognizedFields; + return new Snapshot({ + ...commonFields, + meta: metaFromJSON(meta), + unrecognizedFields: rest, + }); + } +} +exports.Snapshot = Snapshot; +function metaToJSON(meta) { + return Object.entries(meta).reduce((acc, [path, metadata]) => ({ + ...acc, + [path]: metadata.toJSON(), + }), {}); +} +function metaFromJSON(data) { + let meta; + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObjectRecord(data)) { + throw new TypeError('meta field is malformed'); + } + else { + meta = Object.entries(data).reduce((acc, [path, metadata]) => ({ + ...acc, + [path]: file_1.MetaFile.fromJSON(metadata), + }), {}); + } + } + return meta; +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/targets.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/targets.js new file mode 100644 index 0000000000000..54bd8f8c554af --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/targets.js @@ -0,0 +1,92 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Targets = void 0; +const util_1 = __importDefault(require("util")); +const base_1 = require("./base"); +const delegations_1 = require("./delegations"); +const file_1 = require("./file"); +const utils_1 = require("./utils"); +// Container for the signed part of targets metadata. +// +// Targets contains verifying information about target files and also delegates +// responsible to other Targets roles. +class Targets extends base_1.Signed { + constructor(options) { + super(options); + this.type = base_1.MetadataKind.Targets; + this.targets = options.targets || {}; + this.delegations = options.delegations; + } + addTarget(target) { + this.targets[target.path] = target; + } + equals(other) { + if (!(other instanceof Targets)) { + return false; + } + return (super.equals(other) && + util_1.default.isDeepStrictEqual(this.targets, other.targets) && + util_1.default.isDeepStrictEqual(this.delegations, other.delegations)); + } + toJSON() { + const json = { + _type: this.type, + spec_version: this.specVersion, + version: this.version, + expires: this.expires, + targets: targetsToJSON(this.targets), + ...this.unrecognizedFields, + }; + if (this.delegations) { + json.delegations = this.delegations.toJSON(); + } + return json; + } + static fromJSON(data) { + const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data); + const { targets, delegations, ...rest } = unrecognizedFields; + return new Targets({ + ...commonFields, + targets: targetsFromJSON(targets), + delegations: delegationsFromJSON(delegations), + unrecognizedFields: rest, + }); + } +} +exports.Targets = Targets; +function targetsToJSON(targets) { + return Object.entries(targets).reduce((acc, [path, target]) => ({ + ...acc, + [path]: target.toJSON(), + }), {}); +} +function targetsFromJSON(data) { + let targets; + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObjectRecord(data)) { + throw new TypeError('targets must be an object'); + } + else { + targets = Object.entries(data).reduce((acc, [path, target]) => ({ + ...acc, + [path]: file_1.TargetFile.fromJSON(path, target), + }), {}); + } + } + return targets; +} +function delegationsFromJSON(data) { + let delegations; + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObject(data)) { + throw new TypeError('delegations must be an object'); + } + else { + delegations = delegations_1.Delegations.fromJSON(data); + } + } + return delegations; +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/timestamp.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/timestamp.js new file mode 100644 index 0000000000000..9880c4c9fc254 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/timestamp.js @@ -0,0 +1,58 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Timestamp = void 0; +const base_1 = require("./base"); +const file_1 = require("./file"); +const utils_1 = require("./utils"); +/** + * A container for the signed part of timestamp metadata. + * + * A top-level that specifies the latest version of the snapshot role metadata file, + * and hence the latest versions of all metadata and targets on the repository. 
+ */ +class Timestamp extends base_1.Signed { + constructor(options) { + super(options); + this.type = base_1.MetadataKind.Timestamp; + this.snapshotMeta = options.snapshotMeta || new file_1.MetaFile({ version: 1 }); + } + equals(other) { + if (!(other instanceof Timestamp)) { + return false; + } + return super.equals(other) && this.snapshotMeta.equals(other.snapshotMeta); + } + toJSON() { + return { + _type: this.type, + spec_version: this.specVersion, + version: this.version, + expires: this.expires, + meta: { 'snapshot.json': this.snapshotMeta.toJSON() }, + ...this.unrecognizedFields, + }; + } + static fromJSON(data) { + const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data); + const { meta, ...rest } = unrecognizedFields; + return new Timestamp({ + ...commonFields, + snapshotMeta: snapshotMetaFromJSON(meta), + unrecognizedFields: rest, + }); + } +} +exports.Timestamp = Timestamp; +function snapshotMetaFromJSON(data) { + let snapshotMeta; + if (utils_1.guard.isDefined(data)) { + const snapshotData = data['snapshot.json']; + if (!utils_1.guard.isDefined(snapshotData) || !utils_1.guard.isObject(snapshotData)) { + throw new TypeError('missing snapshot.json in meta'); + } + else { + snapshotMeta = file_1.MetaFile.fromJSON(snapshotData); + } + } + return snapshotMeta; +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/guard.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/guard.js new file mode 100644 index 0000000000000..911e8475986bb --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/guard.js @@ -0,0 +1,32 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isDefined = isDefined; +exports.isObject = isObject; +exports.isStringArray = isStringArray; +exports.isObjectArray = isObjectArray; +exports.isStringRecord = isStringRecord; +exports.isObjectRecord = isObjectRecord; +function isDefined(val) { + return val !== undefined; +} +function isObject(value) { + return typeof value === 'object' && value !== null; +} +function isStringArray(value) { + return Array.isArray(value) && value.every((v) => typeof v === 'string'); +} +function isObjectArray(value) { + return Array.isArray(value) && value.every(isObject); +} +function isStringRecord(value) { + return (typeof value === 'object' && + value !== null && + Object.keys(value).every((k) => typeof k === 'string') && + Object.values(value).every((v) => typeof v === 'string')); +} +function isObjectRecord(value) { + return (typeof value === 'object' && + value !== null && + Object.keys(value).every((k) => typeof k === 'string') && + Object.values(value).every((v) => typeof v === 'object' && v !== null)); +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/index.js new file mode 100644 index 0000000000000..872aae28049c9 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/index.js @@ -0,0 +1,28 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.crypto = exports.guard = void 0; +exports.guard = __importStar(require("./guard")); +exports.crypto = __importStar(require("./verify")); diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/key.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/key.js new file mode 100644 index 0000000000000..3c3ec07f1425a --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/key.js @@ -0,0 +1,142 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getPublicKey = getPublicKey; +const crypto_1 = __importDefault(require("crypto")); +const error_1 = require("../error"); +const oid_1 = require("./oid"); +const ASN1_TAG_SEQUENCE = 0x30; +const ANS1_TAG_BIT_STRING = 0x03; +const NULL_BYTE = 0x00; +const OID_EDDSA = '1.3.101.112'; +const OID_EC_PUBLIC_KEY = '1.2.840.10045.2.1'; +const OID_EC_CURVE_P256V1 = '1.2.840.10045.3.1.7'; +const PEM_HEADER = '-----BEGIN PUBLIC KEY-----'; +function getPublicKey(keyInfo) { + switch (keyInfo.keyType) { + case 'rsa': + return getRSAPublicKey(keyInfo); + case 'ed25519': + return getED25519PublicKey(keyInfo); + case 'ecdsa': + case 'ecdsa-sha2-nistp256': + case 'ecdsa-sha2-nistp384': + return getECDCSAPublicKey(keyInfo); + default: + throw new error_1.UnsupportedAlgorithmError(`Unsupported key type: ${keyInfo.keyType}`); + } +} +function getRSAPublicKey(keyInfo) { + // Only support PEM-encoded RSA keys + if (!keyInfo.keyVal.startsWith(PEM_HEADER)) { + throw new error_1.CryptoError('Invalid key format'); + } + const key = crypto_1.default.createPublicKey(keyInfo.keyVal); + switch (keyInfo.scheme) { + case 'rsassa-pss-sha256': + return { + key: key, + padding: crypto_1.default.constants.RSA_PKCS1_PSS_PADDING, + }; + default: + throw new error_1.UnsupportedAlgorithmError(`Unsupported RSA scheme: ${keyInfo.scheme}`); + } +} +function getED25519PublicKey(keyInfo) { + let key; + // If key is already PEM-encoded we can just parse it + if (keyInfo.keyVal.startsWith(PEM_HEADER)) { + key = crypto_1.default.createPublicKey(keyInfo.keyVal); + } + else { + // If key is not PEM-encoded it had better be hex + if (!isHex(keyInfo.keyVal)) { + throw new error_1.CryptoError('Invalid key format'); + } + key = crypto_1.default.createPublicKey({ + key: ed25519.hexToDER(keyInfo.keyVal), + format: 'der', + type: 'spki', + }); + } + return { key }; +} +function getECDCSAPublicKey(keyInfo) { + let key; + // If key is already PEM-encoded we can just parse it + if 
(keyInfo.keyVal.startsWith(PEM_HEADER)) { + key = crypto_1.default.createPublicKey(keyInfo.keyVal); + } + else { + // If key is not PEM-encoded it had better be hex + if (!isHex(keyInfo.keyVal)) { + throw new error_1.CryptoError('Invalid key format'); + } + key = crypto_1.default.createPublicKey({ + key: ecdsa.hexToDER(keyInfo.keyVal), + format: 'der', + type: 'spki', + }); + } + return { key }; +} +const ed25519 = { + // Translates a hex key into a crypto KeyObject + // https://keygen.sh/blog/how-to-use-hexadecimal-ed25519-keys-in-node/ + hexToDER: (hex) => { + const key = Buffer.from(hex, 'hex'); + const oid = (0, oid_1.encodeOIDString)(OID_EDDSA); + // Create a byte sequence containing the OID and key + const elements = Buffer.concat([ + Buffer.concat([ + Buffer.from([ASN1_TAG_SEQUENCE]), + Buffer.from([oid.length]), + oid, + ]), + Buffer.concat([ + Buffer.from([ANS1_TAG_BIT_STRING]), + Buffer.from([key.length + 1]), + Buffer.from([NULL_BYTE]), + key, + ]), + ]); + // Wrap up by creating a sequence of elements + const der = Buffer.concat([ + Buffer.from([ASN1_TAG_SEQUENCE]), + Buffer.from([elements.length]), + elements, + ]); + return der; + }, +}; +const ecdsa = { + hexToDER: (hex) => { + const key = Buffer.from(hex, 'hex'); + const bitString = Buffer.concat([ + Buffer.from([ANS1_TAG_BIT_STRING]), + Buffer.from([key.length + 1]), + Buffer.from([NULL_BYTE]), + key, + ]); + const oids = Buffer.concat([ + (0, oid_1.encodeOIDString)(OID_EC_PUBLIC_KEY), + (0, oid_1.encodeOIDString)(OID_EC_CURVE_P256V1), + ]); + const oidSequence = Buffer.concat([ + Buffer.from([ASN1_TAG_SEQUENCE]), + Buffer.from([oids.length]), + oids, + ]); + // Wrap up by creating a sequence of elements + const der = Buffer.concat([ + Buffer.from([ASN1_TAG_SEQUENCE]), + Buffer.from([oidSequence.length + bitString.length]), + oidSequence, + bitString, + ]); + return der; + }, +}; +const isHex = (key) => /^[0-9a-fA-F]+$/.test(key); diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/oid.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/oid.js new file mode 100644 index 0000000000000..00b29c3030d1e --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/oid.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.encodeOIDString = encodeOIDString; +const ANS1_TAG_OID = 0x06; +function encodeOIDString(oid) { + const parts = oid.split('.'); + // The first two subidentifiers are encoded into the first byte + const first = parseInt(parts[0], 10) * 40 + parseInt(parts[1], 10); + const rest = []; + parts.slice(2).forEach((part) => { + const bytes = encodeVariableLengthInteger(parseInt(part, 10)); + rest.push(...bytes); + }); + const der = Buffer.from([first, ...rest]); + return Buffer.from([ANS1_TAG_OID, der.length, ...der]); +} +function encodeVariableLengthInteger(value) { + const bytes = []; + let mask = 0x00; + while (value > 0) { + bytes.unshift((value & 0x7f) | mask); + value >>= 7; + mask = 0x80; + } + return bytes; +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/types.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/types.js new file mode 100644 index 0000000000000..c8ad2e549bdc6 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/types.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true 
}); diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/verify.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/verify.js new file mode 100644 index 0000000000000..8232b6f6a97ab --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/verify.js @@ -0,0 +1,13 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifySignature = void 0; +const canonical_json_1 = require("@tufjs/canonical-json"); +const crypto_1 = __importDefault(require("crypto")); +const verifySignature = (metaDataSignedData, key, signature) => { + const canonicalData = Buffer.from((0, canonical_json_1.canonicalize)(metaDataSignedData)); + return crypto_1.default.verify(undefined, canonicalData, key, Buffer.from(signature, 'hex')); +}; +exports.verifySignature = verifySignature; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/package.json new file mode 100644 index 0000000000000..8e5132ddf1079 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/package.json @@ -0,0 +1,37 @@ +{ + "name": "@tufjs/models", + "version": "3.0.1", + "description": "TUF metadata models", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "files": [ + "dist" + ], + "scripts": { + "build": "tsc --build", + "clean": "rm -rf dist && rm tsconfig.tsbuildinfo", + "test": "jest" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/theupdateframework/tuf-js.git" + }, + "keywords": [ + "tuf", + "security", + "update" + ], + "author": "bdehamer@github.com", + "license": "MIT", + "bugs": { + "url": "https://github.com/theupdateframework/tuf-js/issues" + }, + "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/models#readme", + "dependencies": { + "@tufjs/canonical-json": "2.0.0", + "minimatch": "^9.0.5" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/LICENSE new file mode 100644 index 0000000000000..a03cd0ed0b338 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter, Kat Marchán, npm, Inc., and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
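The utils/verify.js module added above verifies a signature by serializing the signed
object with @tufjs/canonical-json and handing the raw bytes to Node's crypto.verify. A
minimal standalone sketch of that flow (an illustration, not part of the patch),
assuming a freshly generated ed25519 key pair rather than keys taken from TUF metadata:

    const crypto = require('crypto')
    const { canonicalize } = require('@tufjs/canonical-json')

    const { publicKey, privateKey } = crypto.generateKeyPairSync('ed25519')
    const signed = { _type: 'snapshot', version: 1 }

    // Both signing and verification operate on the canonical JSON form, so
    // property order and whitespace cannot affect the result.
    const data = Buffer.from(canonicalize(signed))
    const sig = crypto.sign(undefined, data, privateKey).toString('hex')

    console.log(crypto.verify(undefined, data, publicKey, Buffer.from(sig, 'hex'))) // true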
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/bin/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/bin/index.js new file mode 100755 index 0000000000000..f35b62ca71a53 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/bin/index.js @@ -0,0 +1,158 @@ +#!/usr/bin/env node + +const run = conf => { + const pacote = require('../') + switch (conf._[0]) { + case 'resolve': + case 'manifest': + case 'packument': + if (conf._[0] === 'resolve' && conf.long) { + return pacote.manifest(conf._[1], conf).then(mani => ({ + resolved: mani._resolved, + integrity: mani._integrity, + from: mani._from, + })) + } + return pacote[conf._[0]](conf._[1], conf) + + case 'tarball': + if (!conf._[2] || conf._[2] === '-') { + return pacote.tarball.stream(conf._[1], stream => { + stream.pipe( + conf.testStdout || + /* istanbul ignore next */ + process.stdout + ) + // make sure it resolves something falsey + return stream.promise().then(() => { + return false + }) + }, conf) + } else { + return pacote.tarball.file(conf._[1], conf._[2], conf) + } + + case 'extract': + return pacote.extract(conf._[1], conf._[2], conf) + + default: /* istanbul ignore next */ { + throw new Error(`bad command: ${conf._[0]}`) + } + } +} + +const version = require('../package.json').version +const usage = () => +`Pacote - The JavaScript Package Handler, v${version} + +Usage: + + pacote resolve + Resolve a specifier and output the fully resolved target + Returns integrity and from if '--long' flag is set. + + pacote manifest + Fetch a manifest and print to stdout + + pacote packument + Fetch a full packument and print to stdout + + pacote tarball [] + Fetch a package tarball and save to + If is missing or '-', the tarball will be streamed to stdout. + + pacote extract + Extract a package to the destination folder. + +Configuration values all match the names of configs passed to npm, or +options passed to Pacote. Additional flags for this executable: + + --long Print an object from 'resolve', including integrity and spec. + --json Print result objects as JSON rather than node's default. + (This is the default if stdout is not a TTY.) + --help -h Print this helpful text. + +For example '--cache=/path/to/folder' will use that folder as the cache. +` + +const shouldJSON = (conf, result) => + conf.json || + !process.stdout.isTTY && + conf.json === undefined && + result && + typeof result === 'object' + +const pretty = (conf, result) => + shouldJSON(conf, result) ? JSON.stringify(result, 0, 2) : result + +let addedLogListener = false +const main = args => { + const conf = parse(args) + if (conf.help || conf.h) { + return console.log(usage()) + } + + if (!addedLogListener) { + process.on('log', console.error) + addedLogListener = true + } + + try { + return run(conf) + .then(result => result && console.log(pretty(conf, result))) + .catch(er => { + console.error(er) + process.exit(1) + }) + } catch (er) { + console.error(er.message) + console.error(usage()) + } +} + +const parseArg = arg => { + const split = arg.slice(2).split('=') + const k = split.shift() + const v = split.join('=') + const no = /^no-/.test(k) && !v + const key = (no ? k.slice(3) : k) + .replace(/^tag$/, 'defaultTag') + .replace(/-([a-z])/g, (_, c) => c.toUpperCase()) + const value = v ? 
v.replace(/^~/, process.env.HOME) : !no + return { key, value } +} + +const parse = args => { + const conf = { + _: [], + cache: process.env.HOME + '/.npm/_cacache', + } + let dashdash = false + args.forEach(arg => { + if (dashdash) { + conf._.push(arg) + } else if (arg === '--') { + dashdash = true + } else if (arg === '-h') { + conf.help = true + } else if (/^--/.test(arg)) { + const { key, value } = parseArg(arg) + conf[key] = value + } else { + conf._.push(arg) + } + }) + return conf +} + +if (module === require.main) { + main(process.argv.slice(2)) +} else { + module.exports = { + main, + run, + usage, + parseArg, + parse, + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/dir.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/dir.js new file mode 100644 index 0000000000000..04846eb8a6e22 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/dir.js @@ -0,0 +1,105 @@ +const { resolve } = require('node:path') +const packlist = require('npm-packlist') +const runScript = require('@npmcli/run-script') +const tar = require('tar') +const { Minipass } = require('minipass') +const Fetcher = require('./fetcher.js') +const FileFetcher = require('./file.js') +const _ = require('./util/protected.js') +const tarCreateOptions = require('./util/tar-create-options.js') + +class DirFetcher extends Fetcher { + constructor (spec, opts) { + super(spec, opts) + // just the fully resolved filename + this.resolved = this.spec.fetchSpec + + this.tree = opts.tree || null + this.Arborist = opts.Arborist || null + } + + // exposes tarCreateOptions as public API + static tarCreateOptions (manifest) { + return tarCreateOptions(manifest) + } + + get types () { + return ['directory'] + } + + #prepareDir () { + return this.manifest().then(mani => { + if (!mani.scripts || !mani.scripts.prepare) { + return + } + if (this.opts.ignoreScripts) { + return + } + + // we *only* run prepare. + // pre/post-pack is run by the npm CLI for publish and pack, + // but this function is *also* run when installing git deps + const stdio = this.opts.foregroundScripts ? 'inherit' : 'pipe' + + return runScript({ + // this || undefined is because runScript will be unhappy with the default null value + scriptShell: this.opts.scriptShell || undefined, + pkg: mani, + event: 'prepare', + path: this.resolved, + stdio, + env: { + npm_package_resolved: this.resolved, + npm_package_integrity: this.integrity, + npm_package_json: resolve(this.resolved, 'package.json'), + }, + }) + }) + } + + [_.tarballFromResolved] () { + if (!this.tree && !this.Arborist) { + throw new Error('DirFetcher requires either a tree or an Arborist constructor to pack') + } + + const stream = new Minipass() + stream.resolved = this.resolved + stream.integrity = this.integrity + + const { prefix, workspaces } = this.opts + + // run the prepare script, get the list of files, and tar it up + // pipe to the stream, and proxy errors the chain. 
+ this.#prepareDir() + .then(async () => { + if (!this.tree) { + const arb = new this.Arborist({ path: this.resolved }) + this.tree = await arb.loadActual() + } + return packlist(this.tree, { path: this.resolved, prefix, workspaces }) + }) + .then(files => tar.c(tarCreateOptions(this.package), files) + .on('error', er => stream.emit('error', er)).pipe(stream)) + .catch(er => stream.emit('error', er)) + return stream + } + + manifest () { + if (this.package) { + return Promise.resolve(this.package) + } + + return this[_.readPackageJson](this.resolved) + .then(mani => this.package = { + ...mani, + _integrity: this.integrity && String(this.integrity), + _resolved: this.resolved, + _from: this.from, + }) + } + + packument () { + return FileFetcher.prototype.packument.apply(this) + } +} +module.exports = DirFetcher diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/fetcher.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/fetcher.js new file mode 100644 index 0000000000000..f2ac97619d3af --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/fetcher.js @@ -0,0 +1,497 @@ +// This is the base class that the other fetcher types in lib +// all descend from. +// It handles the unpacking and retry logic that is shared among +// all of the other Fetcher types. + +const { basename, dirname } = require('node:path') +const { rm, mkdir } = require('node:fs/promises') +const PackageJson = require('@npmcli/package-json') +const cacache = require('cacache') +const fsm = require('fs-minipass') +const getContents = require('@npmcli/installed-package-contents') +const npa = require('npm-package-arg') +const retry = require('promise-retry') +const ssri = require('ssri') +const tar = require('tar') +const { Minipass } = require('minipass') +const { log } = require('proc-log') +const _ = require('./util/protected.js') +const cacheDir = require('./util/cache-dir.js') +const isPackageBin = require('./util/is-package-bin.js') +const removeTrailingSlashes = require('./util/trailing-slashes.js') + +// Pacote is only concerned with the package.json contents +const packageJsonPrepare = (p) => PackageJson.prepare(p).then(pkg => pkg.content) +const packageJsonNormalize = (p) => PackageJson.normalize(p).then(pkg => pkg.content) + +class FetcherBase { + constructor (spec, opts) { + if (!opts || typeof opts !== 'object') { + throw new TypeError('options object is required') + } + this.spec = npa(spec, opts.where) + + this.allowGitIgnore = !!opts.allowGitIgnore + + // a bit redundant because presumably the caller already knows this, + // but it makes it easier to not have to keep track of the requested + // spec when we're dispatching thousands of these at once, and normalizing + // is nice. saveSpec is preferred if set, because it turns stuff like + // x/y#committish into github:x/y#committish. use name@rawSpec for + // registry deps so that we turn xyz and xyz@ -> xyz@ + this.from = this.spec.registry + ? `${this.spec.name}@${this.spec.rawSpec}` : this.spec.saveSpec + + this.#assertType() + // clone the opts object so that others aren't upset when we mutate it + // by adding/modifying the integrity value. + this.opts = { ...opts } + + this.cache = opts.cache || cacheDir().cacache + this.tufCache = opts.tufCache || cacheDir().tufcache + this.resolved = opts.resolved || null + + // default to caching/verifying with sha512, that's what we usually have + // need to change this default, or start overriding it, when sha512 + // is no longer strong enough. 
+ this.defaultIntegrityAlgorithm = opts.defaultIntegrityAlgorithm || 'sha512' + + if (typeof opts.integrity === 'string') { + this.opts.integrity = ssri.parse(opts.integrity) + } + + this.package = null + this.type = this.constructor.name + this.fmode = opts.fmode || 0o666 + this.dmode = opts.dmode || 0o777 + // we don't need a default umask, because we don't chmod files coming + // out of package tarballs. they're forced to have a mode that is + // valid, regardless of what's in the tarball entry, and then we let + // the process's umask setting do its job. but if configured, we do + // respect it. + this.umask = opts.umask || 0 + + this.preferOnline = !!opts.preferOnline + this.preferOffline = !!opts.preferOffline + this.offline = !!opts.offline + + this.before = opts.before + this.fullMetadata = this.before ? true : !!opts.fullMetadata + this.fullReadJson = !!opts.fullReadJson + this[_.readPackageJson] = this.fullReadJson + ? packageJsonPrepare + : packageJsonNormalize + + // rrh is a registry hostname or 'never' or 'always' + // defaults to registry.npmjs.org + this.replaceRegistryHost = (!opts.replaceRegistryHost || opts.replaceRegistryHost === 'npmjs') ? + 'registry.npmjs.org' : opts.replaceRegistryHost + + this.defaultTag = opts.defaultTag || 'latest' + this.registry = removeTrailingSlashes(opts.registry || 'https://registry.npmjs.org') + + // command to run 'prepare' scripts on directories and git dirs + // To use pacote with yarn, for example, set npmBin to 'yarn' + // and npmCliConfig with yarn's equivalents. + this.npmBin = opts.npmBin || 'npm' + + // command to install deps for preparing + this.npmInstallCmd = opts.npmInstallCmd || ['install', '--force'] + + // XXX fill more of this in based on what we know from this.opts + // we explicitly DO NOT fill in --tag, though, since we are often + // going to be packing in the context of a publish, which may set + // a dist-tag, but certainly wants to keep defaulting to latest. + this.npmCliConfig = opts.npmCliConfig || [ + `--cache=${dirname(this.cache)}`, + `--prefer-offline=${!!this.preferOffline}`, + `--prefer-online=${!!this.preferOnline}`, + `--offline=${!!this.offline}`, + ...(this.before ? [`--before=${this.before.toISOString()}`] : []), + '--no-progress', + '--no-save', + '--no-audit', + // override any omit settings from the environment + '--include=dev', + '--include=peer', + '--include=optional', + // we need the actual things, not just the lockfile + '--no-package-lock-only', + '--no-dry-run', + ] + } + + get integrity () { + return this.opts.integrity || null + } + + set integrity (i) { + if (!i) { + return + } + + i = ssri.parse(i) + const current = this.opts.integrity + + // do not ever update an existing hash value, but do + // merge in NEW algos and hashes that we don't already have. + if (current) { + current.merge(i) + } else { + this.opts.integrity = i + } + } + + get notImplementedError () { + return new Error('not implemented in this fetcher type: ' + this.type) + } + + // override in child classes + // Returns a Promise that resolves to this.resolved string value + resolve () { + return this.resolved ? 
Promise.resolve(this.resolved) + : Promise.reject(this.notImplementedError) + } + + packument () { + return Promise.reject(this.notImplementedError) + } + + // override in child class + // returns a manifest containing: + // - name + // - version + // - _resolved + // - _integrity + // - plus whatever else was in there (corgi, full metadata, or pj file) + manifest () { + return Promise.reject(this.notImplementedError) + } + + // private, should be overridden. + // Note that they should *not* calculate or check integrity or cache, + // but *just* return the raw tarball data stream. + [_.tarballFromResolved] () { + throw this.notImplementedError + } + + // public, should not be overridden + tarball () { + return this.tarballStream(stream => stream.concat().then(data => { + data.integrity = this.integrity && String(this.integrity) + data.resolved = this.resolved + data.from = this.from + return data + })) + } + + // private + // Note: cacache will raise a EINTEGRITY error if the integrity doesn't match + #tarballFromCache () { + const startTime = Date.now() + const stream = cacache.get.stream.byDigest(this.cache, this.integrity, this.opts) + const elapsedTime = Date.now() - startTime + // cache is good, so log it as a hit in particular since there was no fetch logged + log.http( + 'cache', + `${this.spec} ${elapsedTime}ms (cache hit)` + ) + return stream + } + + get [_.cacheFetches] () { + return true + } + + #istream (stream) { + // if not caching this, just return it + if (!this.opts.cache || !this[_.cacheFetches]) { + // instead of creating a new integrity stream, we only piggyback on the + // provided stream's events + if (stream.hasIntegrityEmitter) { + stream.on('integrity', i => this.integrity = i) + return stream + } + + const istream = ssri.integrityStream(this.opts) + istream.on('integrity', i => this.integrity = i) + stream.on('error', err => istream.emit('error', err)) + return stream.pipe(istream) + } + + // we have to return a stream that gets ALL the data, and proxies errors, + // but then pipe from the original tarball stream into the cache as well. + // To do this without losing any data, and since the cacache put stream + // is not a passthrough, we have to pipe from the original stream into + // the cache AFTER we pipe into the middleStream. Since the cache stream + // has an asynchronous flush to write its contents to disk, we need to + // defer the middleStream end until the cache stream ends. + const middleStream = new Minipass() + stream.on('error', err => middleStream.emit('error', err)) + stream.pipe(middleStream, { end: false }) + const cstream = cacache.put.stream( + this.opts.cache, + `pacote:tarball:${this.from}`, + this.opts + ) + cstream.on('integrity', i => this.integrity = i) + cstream.on('error', err => stream.emit('error', err)) + stream.pipe(cstream) + + // eslint-disable-next-line promise/catch-or-return + cstream.promise().catch(() => {}).then(() => middleStream.end()) + return middleStream + } + + pickIntegrityAlgorithm () { + return this.integrity ? this.integrity.pickAlgorithm(this.opts) + : this.defaultIntegrityAlgorithm + } + + // TODO: check error class, once those are rolled out to our deps + isDataCorruptionError (er) { + return er.code === 'EINTEGRITY' || er.code === 'Z_DATA_ERROR' + } + + // override the types getter + get types () { + return false + } + + #assertType () { + if (this.types && !this.types.includes(this.spec.type)) { + throw new TypeError(`Wrong spec type (${ + this.spec.type + }) for ${ + this.constructor.name + }. 
Supported types: ${this.types.join(', ')}`) + } + } + + // We allow ENOENTs from cacache, but not anywhere else. + // An ENOENT trying to read a tgz file, for example, is Right Out. + isRetriableError (er) { + // TODO: check error class, once those are rolled out to our deps + return this.isDataCorruptionError(er) || + er.code === 'ENOENT' || + er.code === 'EISDIR' + } + + // Mostly internal, but has some uses + // Pass in a function which returns a promise + // Function will be called 1 or more times with streams that may fail. + // Retries: + // Function MUST handle errors on the stream by rejecting the promise, + // so that retry logic can pick it up and either retry or fail whatever + // promise it was making (ie, failing extraction, etc.) + // + // The return value of this method is a Promise that resolves the same + // as whatever the streamHandler resolves to. + // + // This should never be overridden by child classes, but it is public. + tarballStream (streamHandler) { + // Only short-circuit via cache if we have everything else we'll need, + // and the user has not expressed a preference for checking online. + + const fromCache = ( + !this.preferOnline && + this.integrity && + this.resolved + ) ? streamHandler(this.#tarballFromCache()).catch(er => { + if (this.isDataCorruptionError(er)) { + log.warn('tarball', `cached data for ${ + this.spec + } (${this.integrity}) seems to be corrupted. Refreshing cache.`) + return this.cleanupCached().then(() => { + throw er + }) + } else { + throw er + } + }) : null + + const fromResolved = er => { + if (er) { + if (!this.isRetriableError(er)) { + throw er + } + log.silly('tarball', `no local data for ${ + this.spec + }. Extracting by manifest.`) + } + return this.resolve().then(() => retry(tryAgain => + streamHandler(this.#istream(this[_.tarballFromResolved]())) + .catch(streamErr => { + // Most likely data integrity. A cache ENOENT error is unlikely + // here, since we're definitely not reading from the cache, but it + // IS possible that the fetch subsystem accessed the cache, and the + // entry got blown away or something. Try one more time to be sure. + if (this.isRetriableError(streamErr)) { + log.warn('tarball', `tarball data for ${ + this.spec + } (${this.integrity}) seems to be corrupted. Trying again.`) + return this.cleanupCached().then(() => tryAgain(streamErr)) + } + throw streamErr + }), { retries: 1, minTimeout: 0, maxTimeout: 0 })) + } + + return fromCache ? fromCache.catch(fromResolved) : fromResolved() + } + + cleanupCached () { + return cacache.rm.content(this.cache, this.integrity, this.opts) + } + + #empty (path) { + return getContents({ path, depth: 1 }).then(contents => Promise.all( + contents.map(entry => rm(entry, { recursive: true, force: true })))) + } + + async #mkdir (dest) { + await this.#empty(dest) + return await mkdir(dest, { recursive: true }) + } + + // extraction is always the same. the only difference is where + // the tarball comes from. 
+ async extract (dest) { + await this.#mkdir(dest) + return this.tarballStream((tarball) => this.#extract(dest, tarball)) + } + + #toFile (dest) { + return this.tarballStream(str => new Promise((res, rej) => { + const writer = new fsm.WriteStream(dest) + str.on('error', er => writer.emit('error', er)) + writer.on('error', er => rej(er)) + writer.on('close', () => res({ + integrity: this.integrity && String(this.integrity), + resolved: this.resolved, + from: this.from, + })) + str.pipe(writer) + })) + } + + // don't use this.#mkdir because we don't want to rimraf anything + async tarballFile (dest) { + const dir = dirname(dest) + await mkdir(dir, { recursive: true }) + return this.#toFile(dest) + } + + #extract (dest, tarball) { + const extractor = tar.x(this.#tarxOptions({ cwd: dest })) + const p = new Promise((resolve, reject) => { + extractor.on('end', () => { + resolve({ + resolved: this.resolved, + integrity: this.integrity && String(this.integrity), + from: this.from, + }) + }) + + extractor.on('error', er => { + log.warn('tar', er.message) + log.silly('tar', er) + reject(er) + }) + + tarball.on('error', er => reject(er)) + }) + + tarball.pipe(extractor) + return p + } + + // always ensure that entries are at least as permissive as our configured + // dmode/fmode, but never more permissive than the umask allows. + #entryMode (path, mode, type) { + const m = /Directory|GNUDumpDir/.test(type) ? this.dmode + : /File$/.test(type) ? this.fmode + : /* istanbul ignore next - should never happen in a pkg */ 0 + + // make sure package bins are executable + const exe = isPackageBin(this.package, path) ? 0o111 : 0 + // always ensure that files are read/writable by the owner + return ((mode | m) & ~this.umask) | exe | 0o600 + } + + #tarxOptions ({ cwd }) { + const sawIgnores = new Set() + return { + cwd, + noChmod: true, + noMtime: true, + filter: (name, entry) => { + if (/Link$/.test(entry.type)) { + return false + } + entry.mode = this.#entryMode(entry.path, entry.mode, entry.type) + // this replicates the npm pack behavior where .gitignore files + // are treated like .npmignore files, but only if a .npmignore + // file is not present. 
+ if (/File$/.test(entry.type)) { + const base = basename(entry.path) + if (base === '.npmignore') { + sawIgnores.add(entry.path) + } else if (base === '.gitignore' && !this.allowGitIgnore) { + // rename, but only if there's not already a .npmignore + const ni = entry.path.replace(/\.gitignore$/, '.npmignore') + if (sawIgnores.has(ni)) { + return false + } + entry.path = ni + } + return true + } + }, + strip: 1, + onwarn: /* istanbul ignore next - we can trust that tar logs */ + (code, msg, data) => { + log.warn('tar', code, msg) + log.silly('tar', code, msg, data) + }, + umask: this.umask, + // always ignore ownership info from tarball metadata + preserveOwner: false, + } + } +} + +module.exports = FetcherBase + +// Child classes +const GitFetcher = require('./git.js') +const RegistryFetcher = require('./registry.js') +const FileFetcher = require('./file.js') +const DirFetcher = require('./dir.js') +const RemoteFetcher = require('./remote.js') + +// Get an appropriate fetcher object from a spec and options +FetcherBase.get = (rawSpec, opts = {}) => { + const spec = npa(rawSpec, opts.where) + switch (spec.type) { + case 'git': + return new GitFetcher(spec, opts) + + case 'remote': + return new RemoteFetcher(spec, opts) + + case 'version': + case 'range': + case 'tag': + case 'alias': + return new RegistryFetcher(spec.subSpec || spec, opts) + + case 'file': + return new FileFetcher(spec, opts) + + case 'directory': + return new DirFetcher(spec, opts) + + default: + throw new TypeError('Unknown spec type: ' + spec.type) + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/file.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/file.js new file mode 100644 index 0000000000000..2021325085e4f --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/file.js @@ -0,0 +1,94 @@ +const { resolve } = require('node:path') +const { stat, chmod } = require('node:fs/promises') +const cacache = require('cacache') +const fsm = require('fs-minipass') +const Fetcher = require('./fetcher.js') +const _ = require('./util/protected.js') + +class FileFetcher extends Fetcher { + constructor (spec, opts) { + super(spec, opts) + // just the fully resolved filename + this.resolved = this.spec.fetchSpec + } + + get types () { + return ['file'] + } + + manifest () { + if (this.package) { + return Promise.resolve(this.package) + } + + // have to unpack the tarball for this. + return cacache.tmp.withTmp(this.cache, this.opts, dir => + this.extract(dir) + .then(() => this[_.readPackageJson](dir)) + .then(mani => this.package = { + ...mani, + _integrity: this.integrity && String(this.integrity), + _resolved: this.resolved, + _from: this.from, + })) + } + + #exeBins (pkg, dest) { + if (!pkg.bin) { + return Promise.resolve() + } + + return Promise.all(Object.keys(pkg.bin).map(async k => { + const script = resolve(dest, pkg.bin[k]) + // Best effort. Ignore errors here, the only result is that + // a bin script is not executable. But if it's missing or + // something, we just leave it for a later stage to trip over + // when we can provide a more useful contextual error. + try { + const st = await stat(script) + const mode = st.mode | 0o111 + if (mode === st.mode) { + return + } + await chmod(script, mode) + } catch { + // Ignore errors here + } + })) + } + + extract (dest) { + // if we've already loaded the manifest, then the super got it. + // but if not, read the unpacked manifest and chmod properly. 
+ return super.extract(dest) + .then(result => this.package ? result + : this[_.readPackageJson](dest).then(pkg => + this.#exeBins(pkg, dest)).then(() => result)) + } + + [_.tarballFromResolved] () { + // create a read stream and return it + return new fsm.ReadStream(this.resolved) + } + + packument () { + // simulate based on manifest + return this.manifest().then(mani => ({ + name: mani.name, + 'dist-tags': { + [this.defaultTag]: mani.version, + }, + versions: { + [mani.version]: { + ...mani, + dist: { + tarball: `file:${this.resolved}`, + integrity: this.integrity && String(this.integrity), + }, + }, + }, + })) + } +} + +module.exports = FileFetcher diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/git.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/git.js new file mode 100644 index 0000000000000..077193a86f026 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/git.js @@ -0,0 +1,317 @@ +const cacache = require('cacache') +const git = require('@npmcli/git') +const npa = require('npm-package-arg') +const pickManifest = require('npm-pick-manifest') +const { Minipass } = require('minipass') +const { log } = require('proc-log') +const DirFetcher = require('./dir.js') +const Fetcher = require('./fetcher.js') +const FileFetcher = require('./file.js') +const RemoteFetcher = require('./remote.js') +const _ = require('./util/protected.js') +const addGitSha = require('./util/add-git-sha.js') +const npm = require('./util/npm.js') + +const hashre = /^[a-f0-9]{40}$/ + +// get the repository url. +// prefer https if there's auth, since ssh will drop that. +// otherwise, prefer ssh if available (more secure). +// We have to add the git+ back because npa suppresses it. +const repoUrl = (h, opts) => + h.sshurl && !(h.https && h.auth) && addGitPlus(h.sshurl(opts)) || + h.https && addGitPlus(h.https(opts)) + +// add git+ to the url, but only one time. +const addGitPlus = url => url && `git+${url}`.replace(/^(git\+)+/, 'git+') + +class GitFetcher extends Fetcher { + constructor (spec, opts) { + super(spec, opts) + + // we never want to compare integrity for git dependencies: npm/rfcs#525 + if (this.opts.integrity) { + delete this.opts.integrity + log.warn(`skipping integrity check for git dependency ${this.spec.fetchSpec}`) + } + + this.resolvedRef = null + if (this.spec.hosted) { + this.from = this.spec.hosted.shortcut({ noCommittish: false }) + } + + // shortcut: avoid full clone when we can go straight to the tgz + // if we have the full sha and it's a hosted git platform + if (this.spec.gitCommittish && hashre.test(this.spec.gitCommittish)) { + this.resolvedSha = this.spec.gitCommittish + // use hosted.tarball() when we shell to RemoteFetcher later + this.resolved = this.spec.hosted + ? repoUrl(this.spec.hosted, { noCommittish: false }) + : this.spec.rawSpec + } else { + this.resolvedSha = '' + } + + this.Arborist = opts.Arborist || null + } + + // just exposed to make it easier to test all the combinations + static repoUrl (hosted, opts) { + return repoUrl(hosted, opts) + } + + get types () { + return ['git'] + } + + resolve () { + // likely a hosted git repo with a sha, so get the tarball url + // but in general, no reason to resolve() more than necessary! + if (this.resolved) { + return super.resolve() + } + + // fetch the git repo and then look at the current hash + const h = this.spec.hosted + // try to use ssh, fall back to git. + return h + ? 
this.#resolvedFromHosted(h) + : this.#resolvedFromRepo(this.spec.fetchSpec) + } + + // first try https, since that's faster and passphrase-less for + // public repos, and supports private repos when auth is provided. + // Fall back to SSH to support private repos + // NB: we always store the https url in resolved field if auth + // is present, otherwise ssh if the hosted type provides it + #resolvedFromHosted (hosted) { + return this.#resolvedFromRepo(hosted.https && hosted.https()).catch(er => { + // Throw early since we know pathspec errors will fail again if retried + if (er instanceof git.errors.GitPathspecError) { + throw er + } + const ssh = hosted.sshurl && hosted.sshurl() + // no fallthrough if we can't fall through or have https auth + if (!ssh || hosted.auth) { + throw er + } + return this.#resolvedFromRepo(ssh) + }) + } + + #resolvedFromRepo (gitRemote) { + // XXX make this a custom error class + if (!gitRemote) { + return Promise.reject(new Error(`No git url for ${this.spec}`)) + } + const gitRange = this.spec.gitRange + const name = this.spec.name + return git.revs(gitRemote, this.opts).then(remoteRefs => { + return gitRange ? pickManifest({ + versions: remoteRefs.versions, + 'dist-tags': remoteRefs['dist-tags'], + name, + }, gitRange, this.opts) + : this.spec.gitCommittish ? + remoteRefs.refs[this.spec.gitCommittish] || + remoteRefs.refs[remoteRefs.shas[this.spec.gitCommittish]] + : remoteRefs.refs.HEAD // no git committish, get default head + }).then(revDoc => { + // the committish provided isn't in the rev list + // things like HEAD~3 or @yesterday can land here. + if (!revDoc || !revDoc.sha) { + return this.#resolvedFromClone() + } + + this.resolvedRef = revDoc + this.resolvedSha = revDoc.sha + this.#addGitSha(revDoc.sha) + return this.resolved + }) + } + + #setResolvedWithSha (withSha) { + // we haven't cloned, so a tgz download is still faster + // of course, if it's not a known host, we can't do that. + this.resolved = !this.spec.hosted ? withSha + : repoUrl(npa(withSha).hosted, { noCommittish: false }) + } + + // when we get the git sha, we affix it to our spec to build up + // either a git url with a hash, or a tarball download URL + #addGitSha (sha) { + this.#setResolvedWithSha(addGitSha(this.spec, sha)) + } + + #resolvedFromClone () { + // do a full or shallow clone, then look at the HEAD + // kind of wasteful, but no other option, really + return this.#clone(() => this.resolved) + } + + #prepareDir (dir) { + return this[_.readPackageJson](dir).then(mani => { + // no need if we aren't going to do any preparation. + const scripts = mani.scripts + if (!mani.workspaces && (!scripts || !( + scripts.postinstall || + scripts.build || + scripts.preinstall || + scripts.install || + scripts.prepack || + scripts.prepare))) { + return + } + + // to avoid cases where we have an cycle of git deps that depend + // on one another, we only ever do preparation for one instance + // of a given git dep along the chain of installations. + // Note that this does mean that a dependency MAY in theory end up + // trying to run its prepare script using a dependency that has not + // been properly prepared itself, but that edge case is smaller + // and less hazardous than a fork bomb of npm and git commands. + const noPrepare = !process.env._PACOTE_NO_PREPARE_ ? 
[] + : process.env._PACOTE_NO_PREPARE_.split('\n') + if (noPrepare.includes(this.resolved)) { + log.info('prepare', 'skip prepare, already seen', this.resolved) + return + } + noPrepare.push(this.resolved) + + // the DirFetcher will do its own preparation to run the prepare scripts + // All we have to do is put the deps in place so that it can succeed. + return npm( + this.npmBin, + [].concat(this.npmInstallCmd).concat(this.npmCliConfig), + dir, + { ...process.env, _PACOTE_NO_PREPARE_: noPrepare.join('\n') }, + { message: 'git dep preparation failed' } + ) + }) + } + + [_.tarballFromResolved] () { + const stream = new Minipass() + stream.resolved = this.resolved + stream.from = this.from + + // check it out and then shell out to the DirFetcher tarball packer + this.#clone(dir => this.#prepareDir(dir) + .then(() => new Promise((res, rej) => { + if (!this.Arborist) { + throw new Error('GitFetcher requires an Arborist constructor to pack a tarball') + } + const df = new DirFetcher(`file:${dir}`, { + ...this.opts, + Arborist: this.Arborist, + resolved: null, + integrity: null, + }) + const dirStream = df[_.tarballFromResolved]() + dirStream.on('error', rej) + dirStream.on('end', res) + dirStream.pipe(stream) + }))).catch( + /* istanbul ignore next: very unlikely and hard to test */ + er => stream.emit('error', er) + ) + return stream + } + + // clone a git repo into a temp folder (or fetch and unpack if possible) + // handler accepts a directory, and returns a promise that resolves + // when we're done with it, at which point, cacache deletes it + // + // TODO: after cloning, create a tarball of the folder, and add to the cache + // with cacache.put.stream(), using a key that's deterministic based on the + // spec and repo, so that we don't ever clone the same thing multiple times. + #clone (handler, tarballOk = true) { + const o = { tmpPrefix: 'git-clone' } + const ref = this.resolvedSha || this.spec.gitCommittish + const h = this.spec.hosted + const resolved = this.resolved + + // can be set manually to false to fall back to actual git clone + tarballOk = tarballOk && + h && resolved === repoUrl(h, { noCommittish: false }) && h.tarball + + return cacache.tmp.withTmp(this.cache, o, async tmp => { + // if we're resolved, and have a tarball url, shell out to RemoteFetcher + if (tarballOk) { + const nameat = this.spec.name ? `${this.spec.name}@` : '' + return new RemoteFetcher(h.tarball({ noCommittish: false }), { + ...this.opts, + allowGitIgnore: true, + pkgid: `git:${nameat}${this.resolved}`, + resolved: this.resolved, + integrity: null, // it'll always be different, if we have one + }).extract(tmp).then(() => handler(tmp), er => { + // fall back to ssh download if tarball fails + if (er.constructor.name.match(/^Http/)) { + return this.#clone(handler, false) + } else { + throw er + } + }) + } + + const sha = await ( + h ? this.#cloneHosted(ref, tmp) + : this.#cloneRepo(this.spec.fetchSpec, ref, tmp) + ) + this.resolvedSha = sha + if (!this.resolved) { + await this.#addGitSha(sha) + } + return handler(tmp) + }) + } + + // first try https, since that's faster and passphrase-less for + // public repos, and supports private repos when auth is provided. 
+ // Fall back to SSH to support private repos + // NB: we always store the https url in resolved field if auth + // is present, otherwise ssh if the hosted type provides it + #cloneHosted (ref, tmp) { + const hosted = this.spec.hosted + return this.#cloneRepo(hosted.https({ noCommittish: true }), ref, tmp) + .catch(er => { + // Throw early since we know pathspec errors will fail again if retried + if (er instanceof git.errors.GitPathspecError) { + throw er + } + const ssh = hosted.sshurl && hosted.sshurl({ noCommittish: true }) + // no fallthrough if we can't fall through or have https auth + if (!ssh || hosted.auth) { + throw er + } + return this.#cloneRepo(ssh, ref, tmp) + }) + } + + #cloneRepo (repo, ref, tmp) { + const { opts, spec } = this + return git.clone(repo, ref, tmp, { ...opts, spec }) + } + + manifest () { + if (this.package) { + return Promise.resolve(this.package) + } + + return this.spec.hosted && this.resolved + ? FileFetcher.prototype.manifest.apply(this) + : this.#clone(dir => + this[_.readPackageJson](dir) + .then(mani => this.package = { + ...mani, + _resolved: this.resolved, + _from: this.from, + })) + } + + packument () { + return FileFetcher.prototype.packument.apply(this) + } +} +module.exports = GitFetcher diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/index.js new file mode 100644 index 0000000000000..f35314d275d5f --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/index.js @@ -0,0 +1,23 @@ +const { get } = require('./fetcher.js') +const GitFetcher = require('./git.js') +const RegistryFetcher = require('./registry.js') +const FileFetcher = require('./file.js') +const DirFetcher = require('./dir.js') +const RemoteFetcher = require('./remote.js') + +const tarball = (spec, opts) => get(spec, opts).tarball() +tarball.stream = (spec, handler, opts) => get(spec, opts).tarballStream(handler) +tarball.file = (spec, dest, opts) => get(spec, opts).tarballFile(dest) + +module.exports = { + GitFetcher, + RegistryFetcher, + FileFetcher, + DirFetcher, + RemoteFetcher, + resolve: (spec, opts) => get(spec, opts).resolve(), + extract: (spec, dest, opts) => get(spec, opts).extract(dest), + manifest: (spec, opts) => get(spec, opts).manifest(), + packument: (spec, opts) => get(spec, opts).packument(), + tarball, +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/registry.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/registry.js new file mode 100644 index 0000000000000..1ecf4ee177349 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/registry.js @@ -0,0 +1,369 @@ +const crypto = require('node:crypto') +const PackageJson = require('@npmcli/package-json') +const pickManifest = require('npm-pick-manifest') +const ssri = require('ssri') +const npa = require('npm-package-arg') +const sigstore = require('sigstore') +const fetch = require('npm-registry-fetch') +const Fetcher = require('./fetcher.js') +const RemoteFetcher = require('./remote.js') +const pacoteVersion = require('../package.json').version +const removeTrailingSlashes = require('./util/trailing-slashes.js') +const _ = require('./util/protected.js') + +// Corgis are cute. 🐕🐶 +const corgiDoc = 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*' +const fullDoc = 'application/json' + +// Some really old packages have no time field in their packument so we need a +// cutoff date. 
+const MISSING_TIME_CUTOFF = '2015-01-01T00:00:00.000Z' + +class RegistryFetcher extends Fetcher { + #cacheKey + constructor (spec, opts) { + super(spec, opts) + + // you usually don't want to fetch the same packument multiple times in + // the span of a given script or command, no matter how many pacote calls + // are made, so this lets us avoid doing that. It's only relevant for + // registry fetchers, because other types simulate their packument from + // the manifest, which they memoize on this.package, so it's very cheap + // already. + this.packumentCache = this.opts.packumentCache || null + + this.registry = fetch.pickRegistry(spec, opts) + this.packumentUrl = `${removeTrailingSlashes(this.registry)}/${this.spec.escapedName}` + this.#cacheKey = `${this.fullMetadata ? 'full' : 'corgi'}:${this.packumentUrl}` + + const parsed = new URL(this.registry) + const regKey = `//${parsed.host}${parsed.pathname}` + // unlike the nerf-darted auth keys, this one does *not* allow a mismatch + // of trailing slashes. It must match exactly. + if (this.opts[`${regKey}:_keys`]) { + this.registryKeys = this.opts[`${regKey}:_keys`] + } + + // XXX pacote <=9 has some logic to ignore opts.resolved if + // the resolved URL doesn't go to the same registry. + // Consider reproducing that here, to throw away this.resolved + // in that case. + } + + async resolve () { + // fetching the manifest sets resolved and (if present) integrity + await this.manifest() + if (!this.resolved) { + throw Object.assign( + new Error('Invalid package manifest: no `dist.tarball` field'), + { package: this.spec.toString() } + ) + } + return this.resolved + } + + #headers () { + return { + // npm will override UA, but ensure that we always send *something* + 'user-agent': this.opts.userAgent || + `pacote/${pacoteVersion} node/${process.version}`, + ...(this.opts.headers || {}), + 'pacote-version': pacoteVersion, + 'pacote-req-type': 'packument', + 'pacote-pkg-id': `registry:${this.spec.name}`, + accept: this.fullMetadata ? fullDoc : corgiDoc, + } + } + + async packument () { + // note this might be either an in-flight promise for a request, + // or the actual packument, but we never want to make more than + // one request at a time for the same thing regardless. + if (this.packumentCache?.has(this.#cacheKey)) { + return this.packumentCache.get(this.#cacheKey) + } + + // npm-registry-fetch the packument + // set the appropriate header for corgis if fullMetadata isn't set + // return the res.json() promise + try { + const res = await fetch(this.packumentUrl, { + ...this.opts, + headers: this.#headers(), + spec: this.spec, + + // never check integrity for packuments themselves + integrity: null, + }) + const packument = await res.json() + const contentLength = res.headers.get('content-length') + if (contentLength) { + packument._contentLength = Number(contentLength) + } + this.packumentCache?.set(this.#cacheKey, packument) + return packument + } catch (err) { + this.packumentCache?.delete(this.#cacheKey) + if (err.code !== 'E404' || this.fullMetadata) { + throw err + } + // possible that corgis are not supported by this registry + this.fullMetadata = true + return this.packument() + } + } + + async manifest () { + if (this.package) { + return this.package + } + + // When verifying signatures, we need to fetch the full/uncompressed + // packument to get publish time as this is not included in the + // corgi/compressed packument. 
+ if (this.opts.verifySignatures) { + this.fullMetadata = true + } + + const packument = await this.packument() + const steps = PackageJson.normalizeSteps.filter(s => s !== '_attributes') + const mani = await new PackageJson().fromContent(pickManifest(packument, this.spec.fetchSpec, { + ...this.opts, + defaultTag: this.defaultTag, + before: this.before, + })).normalize({ steps }).then(p => p.content) + + /* XXX add ETARGET and E403 revalidation of cached packuments here */ + + // add _time from packument if fetched with fullMetadata + const time = packument.time?.[mani.version] + if (time) { + mani._time = time + } + + // add _resolved and _integrity from dist object + const { dist } = mani + if (dist) { + this.resolved = mani._resolved = dist.tarball + mani._from = this.from + const distIntegrity = dist.integrity ? ssri.parse(dist.integrity) + : dist.shasum ? ssri.fromHex(dist.shasum, 'sha1', { ...this.opts }) + : null + if (distIntegrity) { + if (this.integrity && !this.integrity.match(distIntegrity)) { + // only bork if they have algos in common. + // otherwise we end up breaking if we have saved a sha512 + // previously for the tarball, but the manifest only + // provides a sha1, which is possible for older publishes. + // Otherwise, this is almost certainly a case of holding it + // wrong, and will result in weird or insecure behavior + // later on when building package tree. + for (const algo of Object.keys(this.integrity)) { + if (distIntegrity[algo]) { + throw Object.assign(new Error( + `Integrity checksum failed when using ${algo}: ` + + `wanted ${this.integrity} but got ${distIntegrity}.` + ), { code: 'EINTEGRITY' }) + } + } + } + // made it this far, the integrity is worthwhile. accept it. + // the setter here will take care of merging it into what we already + // had. 
+ this.integrity = distIntegrity + } + } + if (this.integrity) { + mani._integrity = String(this.integrity) + if (dist.signatures) { + if (this.opts.verifySignatures) { + // validate and throw on error, then set _signatures + const message = `${mani._id}:${mani._integrity}` + for (const signature of dist.signatures) { + const publicKey = this.registryKeys && + this.registryKeys.filter(key => (key.keyid === signature.keyid))[0] + if (!publicKey) { + throw Object.assign(new Error( + `${mani._id} has a registry signature with keyid: ${signature.keyid} ` + + 'but no corresponding public key can be found' + ), { code: 'EMISSINGSIGNATUREKEY' }) + } + + const publishedTime = Date.parse(mani._time || MISSING_TIME_CUTOFF) + const validPublicKey = !publicKey.expires || + publishedTime < Date.parse(publicKey.expires) + if (!validPublicKey) { + throw Object.assign(new Error( + `${mani._id} has a registry signature with keyid: ${signature.keyid} ` + + `but the corresponding public key has expired ${publicKey.expires}` + ), { code: 'EEXPIREDSIGNATUREKEY' }) + } + const verifier = crypto.createVerify('SHA256') + verifier.write(message) + verifier.end() + const valid = verifier.verify( + publicKey.pemkey, + signature.sig, + 'base64' + ) + if (!valid) { + throw Object.assign(new Error( + `${mani._id} has an invalid registry signature with ` + + `keyid: ${publicKey.keyid} and signature: ${signature.sig}` + ), { + code: 'EINTEGRITYSIGNATURE', + keyid: publicKey.keyid, + signature: signature.sig, + resolved: mani._resolved, + integrity: mani._integrity, + }) + } + } + mani._signatures = dist.signatures + } else { + mani._signatures = dist.signatures + } + } + + if (dist.attestations) { + if (this.opts.verifyAttestations) { + // Always fetch attestations from the current registry host + const attestationsPath = new URL(dist.attestations.url).pathname + const attestationsUrl = removeTrailingSlashes(this.registry) + attestationsPath + const res = await fetch(attestationsUrl, { + ...this.opts, + // disable integrity check for attestations json payload, we check the + // integrity in the verification steps below + integrity: null, + }) + const { attestations } = await res.json() + const bundles = attestations.map(({ predicateType, bundle }) => { + const statement = JSON.parse( + Buffer.from(bundle.dsseEnvelope.payload, 'base64').toString('utf8') + ) + const keyid = bundle.dsseEnvelope.signatures[0].keyid + const signature = bundle.dsseEnvelope.signatures[0].sig + + return { + predicateType, + bundle, + statement, + keyid, + signature, + } + }) + + const attestationKeyIds = bundles.map((b) => b.keyid).filter((k) => !!k) + const attestationRegistryKeys = (this.registryKeys || []) + .filter(key => attestationKeyIds.includes(key.keyid)) + if (!attestationRegistryKeys.length) { + throw Object.assign(new Error( + `${mani._id} has attestations but no corresponding public key(s) can be found` + ), { code: 'EMISSINGSIGNATUREKEY' }) + } + + for (const { predicateType, bundle, keyid, signature, statement } of bundles) { + const publicKey = attestationRegistryKeys.find(key => key.keyid === keyid) + // Publish attestations have a keyid set and a valid public key must be found + if (keyid) { + if (!publicKey) { + throw Object.assign(new Error( + `${mani._id} has attestations with keyid: ${keyid} ` + + 'but no corresponding public key can be found' + ), { code: 'EMISSINGSIGNATUREKEY' }) + } + + const integratedTime = new Date( + Number( + bundle.verificationMaterial.tlogEntries[0].integratedTime + ) * 1000 + ) + const 
validPublicKey = !publicKey.expires || + (integratedTime < Date.parse(publicKey.expires)) + if (!validPublicKey) { + throw Object.assign(new Error( + `${mani._id} has attestations with keyid: ${keyid} ` + + `but the corresponding public key has expired ${publicKey.expires}` + ), { code: 'EEXPIREDSIGNATUREKEY' }) + } + } + + const subject = { + name: statement.subject[0].name, + sha512: statement.subject[0].digest.sha512, + } + + // Only type 'version' can be turned into a PURL + const purl = this.spec.type === 'version' ? npa.toPurl(this.spec) : this.spec + // Verify the statement subject matches the package, version + if (subject.name !== purl) { + throw Object.assign(new Error( + `${mani._id} package name and version (PURL): ${purl} ` + + `doesn't match what was signed: ${subject.name}` + ), { code: 'EATTESTATIONSUBJECT' }) + } + + // Verify the statement subject matches the tarball integrity + const integrityHexDigest = ssri.parse(this.integrity).hexDigest() + if (subject.sha512 !== integrityHexDigest) { + throw Object.assign(new Error( + `${mani._id} package integrity (hex digest): ` + + `${integrityHexDigest} ` + + `doesn't match what was signed: ${subject.sha512}` + ), { code: 'EATTESTATIONSUBJECT' }) + } + + try { + // Provenance attestations are signed with a signing certificate + // (including the key) so we don't need to return a public key. + // + // Publish attestations are signed with a keyid so we need to + // specify a public key from the keys endpoint: `registry-host.tld/-/npm/v1/keys` + const options = { + tufCachePath: this.tufCache, + tufForceCache: true, + keySelector: publicKey ? () => publicKey.pemkey : undefined, + } + await sigstore.verify(bundle, options) + } catch (e) { + throw Object.assign(new Error( + `${mani._id} failed to verify attestation: ${e.message}` + ), { + code: 'EATTESTATIONVERIFY', + predicateType, + keyid, + signature, + resolved: mani._resolved, + integrity: mani._integrity, + }) + } + } + mani._attestations = dist.attestations + } else { + mani._attestations = dist.attestations + } + } + } + + this.package = mani + return this.package + } + + [_.tarballFromResolved] () { + // we use a RemoteFetcher to get the actual tarball stream + return new RemoteFetcher(this.resolved, { + ...this.opts, + resolved: this.resolved, + pkgid: `registry:${this.spec.name}@${this.resolved}`, + })[_.tarballFromResolved]() + } + + get types () { + return [ + 'tag', + 'version', + 'range', + ] + } +} +module.exports = RegistryFetcher diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/remote.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/remote.js new file mode 100644 index 0000000000000..bd321e65a1f18 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/remote.js @@ -0,0 +1,89 @@ +const fetch = require('npm-registry-fetch') +const { Minipass } = require('minipass') +const Fetcher = require('./fetcher.js') +const FileFetcher = require('./file.js') +const _ = require('./util/protected.js') +const pacoteVersion = require('../package.json').version + +class RemoteFetcher extends Fetcher { + constructor (spec, opts) { + super(spec, opts) + this.resolved = this.spec.fetchSpec + const resolvedURL = new URL(this.resolved) + if (this.replaceRegistryHost !== 'never' + && (this.replaceRegistryHost === 'always' + || this.replaceRegistryHost === resolvedURL.host)) { + this.resolved = new URL(resolvedURL.pathname, this.registry).href + } + + // nam is a fermented pork sausage that is good to eat + const 
nameat = this.spec.name ? `${this.spec.name}@` : '' + this.pkgid = opts.pkgid ? opts.pkgid : `remote:${nameat}${this.resolved}` + } + + // Don't need to cache tarball fetches in pacote, because make-fetch-happen + // will write into cacache anyway. + get [_.cacheFetches] () { + return false + } + + [_.tarballFromResolved] () { + const stream = new Minipass() + stream.hasIntegrityEmitter = true + + const fetchOpts = { + ...this.opts, + headers: this.#headers(), + spec: this.spec, + integrity: this.integrity, + algorithms: [this.pickIntegrityAlgorithm()], + } + + // eslint-disable-next-line promise/always-return + fetch(this.resolved, fetchOpts).then(res => { + res.body.on('error', + /* istanbul ignore next - exceedingly rare and hard to simulate */ + er => stream.emit('error', er) + ) + + res.body.on('integrity', i => { + this.integrity = i + stream.emit('integrity', i) + }) + + res.body.pipe(stream) + }).catch(er => stream.emit('error', er)) + + return stream + } + + #headers () { + return { + // npm will override this, but ensure that we always send *something* + 'user-agent': this.opts.userAgent || + `pacote/${pacoteVersion} node/${process.version}`, + ...(this.opts.headers || {}), + 'pacote-version': pacoteVersion, + 'pacote-req-type': 'tarball', + 'pacote-pkg-id': this.pkgid, + ...(this.integrity ? { 'pacote-integrity': String(this.integrity) } + : {}), + ...(this.opts.headers || {}), + } + } + + get types () { + return ['remote'] + } + + // getting a packument and/or manifest is the same as with a file: spec. + // unpack the tarball stream, and then read from the package.json file. + packument () { + return FileFetcher.prototype.packument.apply(this) + } + + manifest () { + return FileFetcher.prototype.manifest.apply(this) + } +} +module.exports = RemoteFetcher diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/add-git-sha.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/add-git-sha.js new file mode 100644 index 0000000000000..843fe5b600caf --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/add-git-sha.js @@ -0,0 +1,15 @@ +// add a sha to a git remote url spec +const addGitSha = (spec, sha) => { + if (spec.hosted) { + const h = spec.hosted + const opt = { noCommittish: true } + const base = h.https && h.auth ? h.https(opt) : h.shortcut(opt) + + return `${base}#${sha}` + } else { + // don't use new URL for this, because it doesn't handle scp urls + return spec.rawSpec.replace(/#.*$/, '') + `#${sha}` + } +} + +module.exports = addGitSha diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/cache-dir.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/cache-dir.js new file mode 100644 index 0000000000000..ba5683a7bb5bf --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/cache-dir.js @@ -0,0 +1,15 @@ +const { resolve } = require('node:path') +const { tmpdir, homedir } = require('node:os') + +module.exports = (fakePlatform = false) => { + const temp = tmpdir() + const uidOrPid = process.getuid ? process.getuid() : process.pid + const home = homedir() || resolve(temp, 'npm-' + uidOrPid) + const platform = fakePlatform || process.platform + const cacheExtra = platform === 'win32' ? 
'npm-cache' : '.npm' + const cacheRoot = (platform === 'win32' && process.env.LOCALAPPDATA) || home + return { + cacache: resolve(cacheRoot, cacheExtra, '_cacache'), + tufcache: resolve(cacheRoot, cacheExtra, '_tuf'), + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/is-package-bin.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/is-package-bin.js new file mode 100644 index 0000000000000..49a3f73f537ce --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/is-package-bin.js @@ -0,0 +1,25 @@ +// Function to determine whether a path is in the package.bin set. +// Used to prevent issues when people publish a package from a +// windows machine, and then install with --no-bin-links. +// +// Note: this is not possible in remote or file fetchers, since +// we don't have the manifest until AFTER we've unpacked. But the +// main use case is registry fetching with git a distant second, +// so that's an acceptable edge case to not handle. + +const binObj = (name, bin) => + typeof bin === 'string' ? { [name]: bin } : bin + +const hasBin = (pkg, path) => { + const bin = binObj(pkg.name, pkg.bin) + const p = path.replace(/^[^\\/]*\//, '') + for (const kv of Object.entries(bin)) { + if (kv[1] === p) { + return true + } + } + return false +} + +module.exports = (pkg, path) => + pkg && pkg.bin ? hasBin(pkg, path) : false diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/npm.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/npm.js new file mode 100644 index 0000000000000..a3005c255565f --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/npm.js @@ -0,0 +1,14 @@ +// run an npm command +const spawn = require('@npmcli/promise-spawn') + +module.exports = (npmBin, npmCommand, cwd, env, extra) => { + const isJS = npmBin.endsWith('.js') + const cmd = isJS ? process.execPath : npmBin + const args = (isJS ? [npmBin] : []).concat(npmCommand) + // when installing to run the `prepare` script for a git dep, we need + // to ensure that we don't run into a cycle of checking out packages + // in temp directories. this lets us link previously-seen repos that + // are also being prepared. 
+ + return spawn(cmd, args, { cwd, env }, extra) +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/protected.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/protected.js new file mode 100644 index 0000000000000..e05203b481e6a --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/protected.js @@ -0,0 +1,5 @@ +module.exports = { + cacheFetches: Symbol.for('pacote.Fetcher._cacheFetches'), + readPackageJson: Symbol.for('package.Fetcher._readPackageJson'), + tarballFromResolved: Symbol.for('pacote.Fetcher._tarballFromResolved'), +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/tar-create-options.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/tar-create-options.js new file mode 100644 index 0000000000000..d070f0f7ba2d4 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/tar-create-options.js @@ -0,0 +1,31 @@ +const isPackageBin = require('./is-package-bin.js') + +const tarCreateOptions = manifest => ({ + cwd: manifest._resolved, + prefix: 'package/', + portable: true, + gzip: { + // forcing the level to 9 seems to avoid some + // platform specific optimizations that cause + // integrity mismatch errors due to differing + // end results after compression + level: 9, + }, + + // ensure that package bins are always executable + // Note that npm-packlist is already filtering out + // anything that is not a regular file, ignored by + // .npmignore or package.json "files", etc. + filter: (path, stat) => { + if (isPackageBin(manifest, path)) { + stat.mode |= 0o111 + } + return true + }, + + // Provide a specific date in the 1980s for the benefit of zip, + // which is confounded by files dated at the Unix epoch 0. 
+ mtime: new Date('1985-10-26T08:15:00.000Z'), +}) + +module.exports = tarCreateOptions diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/trailing-slashes.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/trailing-slashes.js new file mode 100644 index 0000000000000..c50cb6173b92e --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/trailing-slashes.js @@ -0,0 +1,10 @@ +const removeTrailingSlashes = (input) => { + // in order to avoid regexp redos detection + let output = input + while (output.endsWith('/')) { + output = output.slice(0, -1) + } + return output +} + +module.exports = removeTrailingSlashes diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/package.json new file mode 100644 index 0000000000000..335c7a6c87bd3 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/package.json @@ -0,0 +1,79 @@ +{ + "name": "pacote", + "version": "20.0.0", + "description": "JavaScript package downloader", + "author": "GitHub Inc.", + "bin": { + "pacote": "bin/index.js" + }, + "license": "ISC", + "main": "lib/index.js", + "scripts": { + "test": "tap", + "snap": "tap", + "lint": "npm run eslint", + "postlint": "template-oss-check", + "lintfix": "npm run eslint -- --fix", + "posttest": "npm run lint", + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" + }, + "tap": { + "timeout": 300, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "devDependencies": { + "@npmcli/arborist": "^7.1.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", + "hosted-git-info": "^8.0.0", + "mutate-fs": "^2.1.1", + "nock": "^13.2.4", + "npm-registry-mock": "^1.3.2", + "tap": "^16.0.1" + }, + "files": [ + "bin/", + "lib/" + ], + "keywords": [ + "packages", + "npm", + "git" + ], + "dependencies": { + "@npmcli/git": "^6.0.0", + "@npmcli/installed-package-contents": "^3.0.0", + "@npmcli/package-json": "^6.0.0", + "@npmcli/promise-spawn": "^8.0.0", + "@npmcli/run-script": "^9.0.0", + "cacache": "^19.0.0", + "fs-minipass": "^3.0.0", + "minipass": "^7.0.2", + "npm-package-arg": "^12.0.0", + "npm-packlist": "^9.0.0", + "npm-pick-manifest": "^10.0.0", + "npm-registry-fetch": "^18.0.0", + "proc-log": "^5.0.0", + "promise-retry": "^2.0.1", + "sigstore": "^3.0.0", + "ssri": "^12.0.0", + "tar": "^6.1.11" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/pacote.git" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.23.3", + "windowsCI": false, + "publish": "true" + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/sigstore/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/sigstore/LICENSE new file mode 100644 index 0000000000000..e9e7c1679a09d --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/sigstore/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 The Sigstore Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
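Side note on the pacote helpers vendored earlier in this patch: lib/util/tar-create-options.js builds the options object pacote hands to node-tar so packed tarballs come out reproducible (portable mode, gzip level 9, executable bits forced on package bins, and a fixed 1985 mtime). The sketch below is illustrative only and is not part of the patch; the manifest object, entry list, and output path are hypothetical, and it assumes the standard node-tar tar.c() API plus a deep require of the helper (pacote ships lib/ in its "files" list and declares no "exports" map).

const tar = require('tar')
// helper added by this patch; resolved from the nested pacote install
const tarCreateOptions = require('pacote/lib/util/tar-create-options.js')

// hypothetical manifest: _resolved must point at the unpacked package directory
const manifest = { _resolved: '/tmp/example-pkg', bin: { example: 'bin/cli.js' } }

// tar.c() returns a promise when the `file` option is set; the entry list here
// stands in for the npm-packlist output pacote would normally supply
tar.c({ ...tarCreateOptions(manifest), file: '/tmp/example-pkg.tgz' },
  ['package.json', 'lib/index.js', 'bin/cli.js'])
  .then(() => console.log('wrote /tmp/example-pkg.tgz'))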
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/sigstore/dist/config.js b/node_modules/@npmcli/metavuln-calculator/node_modules/sigstore/dist/config.js new file mode 100644 index 0000000000000..e8b2392f97f23 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/sigstore/dist/config.js @@ -0,0 +1,120 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DEFAULT_TIMEOUT = exports.DEFAULT_RETRY = void 0; +exports.createBundleBuilder = createBundleBuilder; +exports.createKeyFinder = createKeyFinder; +exports.createVerificationPolicy = createVerificationPolicy; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const core_1 = require("@sigstore/core"); +const sign_1 = require("@sigstore/sign"); +const verify_1 = require("@sigstore/verify"); +exports.DEFAULT_RETRY = { retries: 2 }; +exports.DEFAULT_TIMEOUT = 5000; +function createBundleBuilder(bundleType, options) { + const bundlerOptions = { + signer: initSigner(options), + witnesses: initWitnesses(options), + }; + switch (bundleType) { + case 'messageSignature': + return new sign_1.MessageSignatureBundleBuilder(bundlerOptions); + case 'dsseEnvelope': + return new sign_1.DSSEBundleBuilder({ + ...bundlerOptions, + certificateChain: options.legacyCompatibility, + }); + } +} +// Translates the public KeySelector type into the KeyFinderFunc type needed by +// the verifier. +function createKeyFinder(keySelector) { + return (hint) => { + const key = keySelector(hint); + if (!key) { + throw new verify_1.VerificationError({ + code: 'PUBLIC_KEY_ERROR', + message: `key not found: ${hint}`, + }); + } + return { + publicKey: core_1.crypto.createPublicKey(key), + validFor: () => true, + }; + }; +} +function createVerificationPolicy(options) { + const policy = {}; + const san = options.certificateIdentityEmail || options.certificateIdentityURI; + if (san) { + policy.subjectAlternativeName = san; + } + if (options.certificateIssuer) { + policy.extensions = { issuer: options.certificateIssuer }; + } + return policy; +} +// Instantiate the FulcioSigner based on the supplied options. +function initSigner(options) { + return new sign_1.FulcioSigner({ + fulcioBaseURL: options.fulcioURL, + identityProvider: options.identityProvider || initIdentityProvider(options), + retry: options.retry ?? exports.DEFAULT_RETRY, + timeout: options.timeout ?? exports.DEFAULT_TIMEOUT, + }); +} +// Instantiate an identity provider based on the supplied options. If an +// explicit identity token is provided, use that. Otherwise, use the CI +// context provider. +function initIdentityProvider(options) { + const token = options.identityToken; + if (token) { + /* istanbul ignore next */ + return { getToken: () => Promise.resolve(token) }; + } + else { + return new sign_1.CIContextProvider('sigstore'); + } +} +// Instantiate a collection of witnesses based on the supplied options. 
+function initWitnesses(options) { + const witnesses = []; + if (isRekorEnabled(options)) { + witnesses.push(new sign_1.RekorWitness({ + rekorBaseURL: options.rekorURL, + entryType: options.legacyCompatibility ? 'intoto' : 'dsse', + fetchOnConflict: false, + retry: options.retry ?? exports.DEFAULT_RETRY, + timeout: options.timeout ?? exports.DEFAULT_TIMEOUT, + })); + } + if (isTSAEnabled(options)) { + witnesses.push(new sign_1.TSAWitness({ + tsaBaseURL: options.tsaServerURL, + retry: options.retry ?? exports.DEFAULT_RETRY, + timeout: options.timeout ?? exports.DEFAULT_TIMEOUT, + })); + } + return witnesses; +} +// Type assertion to ensure that Rekor is enabled +function isRekorEnabled(options) { + return options.tlogUpload !== false; +} +// Type assertion to ensure that TSA is enabled +function isTSAEnabled(options) { + return options.tsaServerURL !== undefined; +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/sigstore/dist/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/sigstore/dist/index.js new file mode 100644 index 0000000000000..7f6a5cf86bbfc --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/sigstore/dist/index.js @@ -0,0 +1,34 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verify = exports.sign = exports.createVerifier = exports.attest = exports.VerificationError = exports.PolicyError = exports.TUFError = exports.InternalError = exports.DEFAULT_REKOR_URL = exports.DEFAULT_FULCIO_URL = exports.ValidationError = void 0; +/* +Copyright 2022 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +var bundle_1 = require("@sigstore/bundle"); +Object.defineProperty(exports, "ValidationError", { enumerable: true, get: function () { return bundle_1.ValidationError; } }); +var sign_1 = require("@sigstore/sign"); +Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return sign_1.DEFAULT_FULCIO_URL; } }); +Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return sign_1.DEFAULT_REKOR_URL; } }); +Object.defineProperty(exports, "InternalError", { enumerable: true, get: function () { return sign_1.InternalError; } }); +var tuf_1 = require("@sigstore/tuf"); +Object.defineProperty(exports, "TUFError", { enumerable: true, get: function () { return tuf_1.TUFError; } }); +var verify_1 = require("@sigstore/verify"); +Object.defineProperty(exports, "PolicyError", { enumerable: true, get: function () { return verify_1.PolicyError; } }); +Object.defineProperty(exports, "VerificationError", { enumerable: true, get: function () { return verify_1.VerificationError; } }); +var sigstore_1 = require("./sigstore"); +Object.defineProperty(exports, "attest", { enumerable: true, get: function () { return sigstore_1.attest; } }); +Object.defineProperty(exports, "createVerifier", { enumerable: true, get: function () { return sigstore_1.createVerifier; } }); +Object.defineProperty(exports, "sign", { enumerable: true, get: function () { return sigstore_1.sign; } }); +Object.defineProperty(exports, "verify", { enumerable: true, get: function () { return sigstore_1.verify; } }); diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/sigstore/dist/sigstore.js b/node_modules/@npmcli/metavuln-calculator/node_modules/sigstore/dist/sigstore.js new file mode 100644 index 0000000000000..2b37ef46b7438 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/sigstore/dist/sigstore.js @@ -0,0 +1,102 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.sign = sign; +exports.attest = attest; +exports.verify = verify; +exports.createVerifier = createVerifier; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const bundle_1 = require("@sigstore/bundle"); +const tuf = __importStar(require("@sigstore/tuf")); +const verify_1 = require("@sigstore/verify"); +const config = __importStar(require("./config")); +async function sign(payload, +/* istanbul ignore next */ +options = {}) { + const bundler = config.createBundleBuilder('messageSignature', options); + const bundle = await bundler.create({ data: payload }); + return (0, bundle_1.bundleToJSON)(bundle); +} +async function attest(payload, payloadType, +/* istanbul ignore next */ +options = {}) { + const bundler = config.createBundleBuilder('dsseEnvelope', options); + const bundle = await bundler.create({ data: payload, type: payloadType }); + return (0, bundle_1.bundleToJSON)(bundle); +} +async function verify(bundle, dataOrOptions, options) { + let data; + if (Buffer.isBuffer(dataOrOptions)) { + data = dataOrOptions; + } + else { + options = dataOrOptions; + } + return createVerifier(options).then((verifier) => verifier.verify(bundle, data)); +} +async function createVerifier( +/* istanbul ignore next */ +options = {}) { + const trustedRoot = await tuf.getTrustedRoot({ + mirrorURL: options.tufMirrorURL, + rootPath: options.tufRootPath, + cachePath: options.tufCachePath, + forceCache: options.tufForceCache, + retry: options.retry ?? config.DEFAULT_RETRY, + timeout: options.timeout ?? config.DEFAULT_TIMEOUT, + }); + const keyFinder = options.keySelector + ? 
config.createKeyFinder(options.keySelector) + : undefined; + const trustMaterial = (0, verify_1.toTrustMaterial)(trustedRoot, keyFinder); + const verifierOptions = { + ctlogThreshold: options.ctLogThreshold, + tlogThreshold: options.tlogThreshold, + }; + const verifier = new verify_1.Verifier(trustMaterial, verifierOptions); + const policy = config.createVerificationPolicy(options); + return { + verify: (bundle, payload) => { + const deserializedBundle = (0, bundle_1.bundleFromJSON)(bundle); + const signedEntity = (0, verify_1.toSignedEntity)(deserializedBundle, payload); + verifier.verify(signedEntity, policy); + return; + }, + }; +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/sigstore/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/sigstore/package.json new file mode 100644 index 0000000000000..0f798a263657b --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/sigstore/package.json @@ -0,0 +1,47 @@ +{ + "name": "sigstore", + "version": "3.0.0", + "description": "code-signing for npm packages", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "clean": "shx rm -rf dist *.tsbuildinfo", + "build": "tsc --build", + "test": "jest" + }, + "files": [ + "dist", + "store" + ], + "author": "bdehamer@github.com", + "license": "Apache-2.0", + "repository": { + "type": "git", + "url": "git+https://github.com/sigstore/sigstore-js.git" + }, + "bugs": { + "url": "https://github.com/sigstore/sigstore-js/issues" + }, + "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/client#readme", + "publishConfig": { + "provenance": true + }, + "devDependencies": { + "@sigstore/rekor-types": "^3.0.0", + "@sigstore/jest": "^0.0.0", + "@sigstore/mock": "^0.8.0", + "@tufjs/repo-mock": "^3.0.1", + "@types/make-fetch-happen": "^10.0.4" + }, + "dependencies": { + "@sigstore/bundle": "^3.0.0", + "@sigstore/core": "^2.0.0", + "@sigstore/protobuf-specs": "^0.3.2", + "@sigstore/sign": "^3.0.0", + "@sigstore/tuf": "^3.0.0", + "@sigstore/verify": "^2.0.0" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/LICENSE new file mode 100644 index 0000000000000..420700f5d3765 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 GitHub and the TUF Contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
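For context, the vendored sigstore 3.x client whose implementation appears above exposes sign(), attest(), verify(), and createVerifier(). A minimal usage sketch, not part of the patch: the payload and identity token are hypothetical, and only a subset of the options handled in dist/config.js is shown.

const { sign, verify } = require('sigstore')

const payload = Buffer.from('hello, world')

async function demo () {
  // sign() builds a messageSignature bundle; without an identityToken it falls
  // back to the CI context provider, so a token is passed here explicitly
  const bundle = await sign(payload, { identityToken: '<oidc-token>' })

  // verify() fetches the trusted root via @sigstore/tuf, rehydrates the bundle,
  // and throws a VerificationError if the signature or policy check fails
  await verify(bundle, payload)
  console.log('bundle verified')
}

demo().catch((err) => {
  console.error(err)
  process.exit(1)
})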
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/config.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/config.js new file mode 100644 index 0000000000000..c66d76af86b98 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/config.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultConfig = void 0; +exports.defaultConfig = { + maxRootRotations: 256, + maxDelegations: 32, + rootMaxLength: 512000, //bytes + timestampMaxLength: 16384, // bytes + snapshotMaxLength: 2000000, // bytes + targetsMaxLength: 5000000, // bytes + prefixTargetsWithHash: true, + fetchTimeout: 100000, // milliseconds + fetchRetries: undefined, + fetchRetry: 2, +}; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/error.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/error.js new file mode 100644 index 0000000000000..f4b10fa202895 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/error.js @@ -0,0 +1,48 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DownloadHTTPError = exports.DownloadLengthMismatchError = exports.DownloadError = exports.ExpiredMetadataError = exports.EqualVersionError = exports.BadVersionError = exports.RepositoryError = exports.PersistError = exports.RuntimeError = exports.ValueError = void 0; +// An error about insufficient values +class ValueError extends Error { +} +exports.ValueError = ValueError; +class RuntimeError extends Error { +} +exports.RuntimeError = RuntimeError; +class PersistError extends Error { +} +exports.PersistError = PersistError; +// An error with a repository's state, such as a missing file. +// It covers all exceptions that come from the repository side when +// looking from the perspective of users of metadata API or ngclient. +class RepositoryError extends Error { +} +exports.RepositoryError = RepositoryError; +// An error for metadata that contains an invalid version number. +class BadVersionError extends RepositoryError { +} +exports.BadVersionError = BadVersionError; +// An error for metadata containing a previously verified version number. +class EqualVersionError extends BadVersionError { +} +exports.EqualVersionError = EqualVersionError; +// Indicate that a TUF Metadata file has expired. +class ExpiredMetadataError extends RepositoryError { +} +exports.ExpiredMetadataError = ExpiredMetadataError; +//----- Download Errors ------------------------------------------------------- +// An error occurred while attempting to download a file. +class DownloadError extends Error { +} +exports.DownloadError = DownloadError; +// Indicate that a mismatch of lengths was seen while downloading a file +class DownloadLengthMismatchError extends DownloadError { +} +exports.DownloadLengthMismatchError = DownloadLengthMismatchError; +// Returned by FetcherInterface implementations for HTTP errors. 
+class DownloadHTTPError extends DownloadError { + constructor(message, statusCode) { + super(message); + this.statusCode = statusCode; + } +} +exports.DownloadHTTPError = DownloadHTTPError; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/fetcher.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/fetcher.js new file mode 100644 index 0000000000000..f966ce1bb0cdc --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/fetcher.js @@ -0,0 +1,84 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DefaultFetcher = exports.BaseFetcher = void 0; +const debug_1 = __importDefault(require("debug")); +const fs_1 = __importDefault(require("fs")); +const make_fetch_happen_1 = __importDefault(require("make-fetch-happen")); +const util_1 = __importDefault(require("util")); +const error_1 = require("./error"); +const tmpfile_1 = require("./utils/tmpfile"); +const log = (0, debug_1.default)('tuf:fetch'); +class BaseFetcher { + // Download file from given URL. The file is downloaded to a temporary + // location and then passed to the given handler. The handler is responsible + // for moving the file to its final location. The temporary file is deleted + // after the handler returns. + async downloadFile(url, maxLength, handler) { + return (0, tmpfile_1.withTempFile)(async (tmpFile) => { + const reader = await this.fetch(url); + let numberOfBytesReceived = 0; + const fileStream = fs_1.default.createWriteStream(tmpFile); + // Read the stream a chunk at a time so that we can check + // the length of the file as we go + try { + for await (const chunk of reader) { + const bufferChunk = Buffer.from(chunk); + numberOfBytesReceived += bufferChunk.length; + if (numberOfBytesReceived > maxLength) { + throw new error_1.DownloadLengthMismatchError('Max length reached'); + } + await writeBufferToStream(fileStream, bufferChunk); + } + } + finally { + // Make sure we always close the stream + await util_1.default.promisify(fileStream.close).bind(fileStream)(); + } + return handler(tmpFile); + }); + } + // Download bytes from given URL. 
+ async downloadBytes(url, maxLength) { + return this.downloadFile(url, maxLength, async (file) => { + const stream = fs_1.default.createReadStream(file); + const chunks = []; + for await (const chunk of stream) { + chunks.push(chunk); + } + return Buffer.concat(chunks); + }); + } +} +exports.BaseFetcher = BaseFetcher; +class DefaultFetcher extends BaseFetcher { + constructor(options = {}) { + super(); + this.timeout = options.timeout; + this.retry = options.retry; + } + async fetch(url) { + log('GET %s', url); + const response = await (0, make_fetch_happen_1.default)(url, { + timeout: this.timeout, + retry: this.retry, + }); + if (!response.ok || !response?.body) { + throw new error_1.DownloadHTTPError('Failed to download', response.status); + } + return response.body; + } +} +exports.DefaultFetcher = DefaultFetcher; +const writeBufferToStream = async (stream, buffer) => { + return new Promise((resolve, reject) => { + stream.write(buffer, (err) => { + if (err) { + reject(err); + } + resolve(true); + }); + }); +}; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/index.js new file mode 100644 index 0000000000000..5a83b91f355d8 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/index.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Updater = exports.BaseFetcher = exports.TargetFile = void 0; +var models_1 = require("@tufjs/models"); +Object.defineProperty(exports, "TargetFile", { enumerable: true, get: function () { return models_1.TargetFile; } }); +var fetcher_1 = require("./fetcher"); +Object.defineProperty(exports, "BaseFetcher", { enumerable: true, get: function () { return fetcher_1.BaseFetcher; } }); +var updater_1 = require("./updater"); +Object.defineProperty(exports, "Updater", { enumerable: true, get: function () { return updater_1.Updater; } }); diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/store.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/store.js new file mode 100644 index 0000000000000..8567336108709 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/store.js @@ -0,0 +1,208 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TrustedMetadataStore = void 0; +const models_1 = require("@tufjs/models"); +const error_1 = require("./error"); +class TrustedMetadataStore { + constructor(rootData) { + this.trustedSet = {}; + // Client workflow 5.1: record fixed update start time + this.referenceTime = new Date(); + // Client workflow 5.2: load trusted root metadata + this.loadTrustedRoot(rootData); + } + get root() { + if (!this.trustedSet.root) { + throw new ReferenceError('No trusted root metadata'); + } + return this.trustedSet.root; + } + get timestamp() { + return this.trustedSet.timestamp; + } + get snapshot() { + return this.trustedSet.snapshot; + } + get targets() { + return this.trustedSet.targets; + } + getRole(name) { + return this.trustedSet[name]; + } + updateRoot(bytesBuffer) { + const data = JSON.parse(bytesBuffer.toString('utf8')); + const newRoot = models_1.Metadata.fromJSON(models_1.MetadataKind.Root, data); + if (newRoot.signed.type != models_1.MetadataKind.Root) { + throw new error_1.RepositoryError(`Expected 'root', got ${newRoot.signed.type}`); + } + // Client workflow 5.4: check for arbitrary software attack + 
this.root.verifyDelegate(models_1.MetadataKind.Root, newRoot); + // Client workflow 5.5: check for rollback attack + if (newRoot.signed.version != this.root.signed.version + 1) { + throw new error_1.BadVersionError(`Expected version ${this.root.signed.version + 1}, got ${newRoot.signed.version}`); + } + // Check that new root is signed by self + newRoot.verifyDelegate(models_1.MetadataKind.Root, newRoot); + // Client workflow 5.7: set new root as trusted root + this.trustedSet.root = newRoot; + return newRoot; + } + updateTimestamp(bytesBuffer) { + if (this.snapshot) { + throw new error_1.RuntimeError('Cannot update timestamp after snapshot'); + } + if (this.root.signed.isExpired(this.referenceTime)) { + throw new error_1.ExpiredMetadataError('Final root.json is expired'); + } + const data = JSON.parse(bytesBuffer.toString('utf8')); + const newTimestamp = models_1.Metadata.fromJSON(models_1.MetadataKind.Timestamp, data); + if (newTimestamp.signed.type != models_1.MetadataKind.Timestamp) { + throw new error_1.RepositoryError(`Expected 'timestamp', got ${newTimestamp.signed.type}`); + } + // Client workflow 5.4.2: check for arbitrary software attack + this.root.verifyDelegate(models_1.MetadataKind.Timestamp, newTimestamp); + if (this.timestamp) { + // Prevent rolling back timestamp version + // Client workflow 5.4.3.1: check for rollback attack + if (newTimestamp.signed.version < this.timestamp.signed.version) { + throw new error_1.BadVersionError(`New timestamp version ${newTimestamp.signed.version} is less than current version ${this.timestamp.signed.version}`); + } + // Keep using old timestamp if versions are equal. + if (newTimestamp.signed.version === this.timestamp.signed.version) { + throw new error_1.EqualVersionError(`New timestamp version ${newTimestamp.signed.version} is equal to current version ${this.timestamp.signed.version}`); + } + // Prevent rolling back snapshot version + // Client workflow 5.4.3.2: check for rollback attack + const snapshotMeta = this.timestamp.signed.snapshotMeta; + const newSnapshotMeta = newTimestamp.signed.snapshotMeta; + if (newSnapshotMeta.version < snapshotMeta.version) { + throw new error_1.BadVersionError(`New snapshot version ${newSnapshotMeta.version} is less than current version ${snapshotMeta.version}`); + } + } + // expiry not checked to allow old timestamp to be used for rollback + // protection of new timestamp: expiry is checked in update_snapshot + this.trustedSet.timestamp = newTimestamp; + // Client workflow 5.4.4: check for freeze attack + this.checkFinalTimestamp(); + return newTimestamp; + } + updateSnapshot(bytesBuffer, trusted = false) { + if (!this.timestamp) { + throw new error_1.RuntimeError('Cannot update snapshot before timestamp'); + } + if (this.targets) { + throw new error_1.RuntimeError('Cannot update snapshot after targets'); + } + // Snapshot cannot be loaded if final timestamp is expired + this.checkFinalTimestamp(); + const snapshotMeta = this.timestamp.signed.snapshotMeta; + // Verify non-trusted data against the hashes in timestamp, if any. + // Trusted snapshot data has already been verified once. 
+ // Client workflow 5.5.2: check against timestamp role's snaphsot hash + if (!trusted) { + snapshotMeta.verify(bytesBuffer); + } + const data = JSON.parse(bytesBuffer.toString('utf8')); + const newSnapshot = models_1.Metadata.fromJSON(models_1.MetadataKind.Snapshot, data); + if (newSnapshot.signed.type != models_1.MetadataKind.Snapshot) { + throw new error_1.RepositoryError(`Expected 'snapshot', got ${newSnapshot.signed.type}`); + } + // Client workflow 5.5.3: check for arbitrary software attack + this.root.verifyDelegate(models_1.MetadataKind.Snapshot, newSnapshot); + // version check against meta version (5.5.4) is deferred to allow old + // snapshot to be used in rollback protection + // Client workflow 5.5.5: check for rollback attack + if (this.snapshot) { + Object.entries(this.snapshot.signed.meta).forEach(([fileName, fileInfo]) => { + const newFileInfo = newSnapshot.signed.meta[fileName]; + if (!newFileInfo) { + throw new error_1.RepositoryError(`Missing file ${fileName} in new snapshot`); + } + if (newFileInfo.version < fileInfo.version) { + throw new error_1.BadVersionError(`New version ${newFileInfo.version} of ${fileName} is less than current version ${fileInfo.version}`); + } + }); + } + this.trustedSet.snapshot = newSnapshot; + // snapshot is loaded, but we raise if it's not valid _final_ snapshot + // Client workflow 5.5.4 & 5.5.6 + this.checkFinalSnapsnot(); + return newSnapshot; + } + updateDelegatedTargets(bytesBuffer, roleName, delegatorName) { + if (!this.snapshot) { + throw new error_1.RuntimeError('Cannot update delegated targets before snapshot'); + } + // Targets cannot be loaded if final snapshot is expired or its version + // does not match meta version in timestamp. + this.checkFinalSnapsnot(); + const delegator = this.trustedSet[delegatorName]; + if (!delegator) { + throw new error_1.RuntimeError(`No trusted ${delegatorName} metadata`); + } + // Extract metadata for the delegated role from snapshot + const meta = this.snapshot.signed.meta?.[`${roleName}.json`]; + if (!meta) { + throw new error_1.RepositoryError(`Missing ${roleName}.json in snapshot`); + } + // Client workflow 5.6.2: check against snapshot role's targets hash + meta.verify(bytesBuffer); + const data = JSON.parse(bytesBuffer.toString('utf8')); + const newDelegate = models_1.Metadata.fromJSON(models_1.MetadataKind.Targets, data); + if (newDelegate.signed.type != models_1.MetadataKind.Targets) { + throw new error_1.RepositoryError(`Expected 'targets', got ${newDelegate.signed.type}`); + } + // Client workflow 5.6.3: check for arbitrary software attack + delegator.verifyDelegate(roleName, newDelegate); + // Client workflow 5.6.4: Check against snapshot role’s targets version + const version = newDelegate.signed.version; + if (version != meta.version) { + throw new error_1.BadVersionError(`Version ${version} of ${roleName} does not match snapshot version ${meta.version}`); + } + // Client workflow 5.6.5: check for a freeze attack + if (newDelegate.signed.isExpired(this.referenceTime)) { + throw new error_1.ExpiredMetadataError(`${roleName}.json is expired`); + } + this.trustedSet[roleName] = newDelegate; + } + // Verifies and loads data as trusted root metadata. + // Note that an expired initial root is still considered valid. 
+ loadTrustedRoot(bytesBuffer) { + const data = JSON.parse(bytesBuffer.toString('utf8')); + const root = models_1.Metadata.fromJSON(models_1.MetadataKind.Root, data); + if (root.signed.type != models_1.MetadataKind.Root) { + throw new error_1.RepositoryError(`Expected 'root', got ${root.signed.type}`); + } + root.verifyDelegate(models_1.MetadataKind.Root, root); + this.trustedSet['root'] = root; + } + checkFinalTimestamp() { + // Timestamp MUST be loaded + if (!this.timestamp) { + throw new ReferenceError('No trusted timestamp metadata'); + } + // Client workflow 5.4.4: check for freeze attack + if (this.timestamp.signed.isExpired(this.referenceTime)) { + throw new error_1.ExpiredMetadataError('Final timestamp.json is expired'); + } + } + checkFinalSnapsnot() { + // Snapshot and timestamp MUST be loaded + if (!this.snapshot) { + throw new ReferenceError('No trusted snapshot metadata'); + } + if (!this.timestamp) { + throw new ReferenceError('No trusted timestamp metadata'); + } + // Client workflow 5.5.6: check for freeze attack + if (this.snapshot.signed.isExpired(this.referenceTime)) { + throw new error_1.ExpiredMetadataError('snapshot.json is expired'); + } + // Client workflow 5.5.4: check against timestamp role’s snapshot version + const snapshotMeta = this.timestamp.signed.snapshotMeta; + if (this.snapshot.signed.version !== snapshotMeta.version) { + throw new error_1.BadVersionError("Snapshot version doesn't match timestamp"); + } + } +} +exports.TrustedMetadataStore = TrustedMetadataStore; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/updater.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/updater.js new file mode 100644 index 0000000000000..8d5eb4428f044 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/updater.js @@ -0,0 +1,350 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Updater = void 0; +const models_1 = require("@tufjs/models"); +const debug_1 = __importDefault(require("debug")); +const fs = __importStar(require("fs")); +const path = __importStar(require("path")); +const config_1 = require("./config"); +const error_1 = require("./error"); +const fetcher_1 = require("./fetcher"); +const store_1 = require("./store"); +const url = __importStar(require("./utils/url")); +const log = (0, debug_1.default)('tuf:cache'); +class Updater { + constructor(options) { + const { metadataDir, metadataBaseUrl, targetDir, targetBaseUrl, fetcher, config, } = options; + this.dir = metadataDir; + this.metadataBaseUrl = metadataBaseUrl; + this.targetDir = targetDir; + this.targetBaseUrl = targetBaseUrl; + this.forceCache = options.forceCache ?? false; + const data = this.loadLocalMetadata(models_1.MetadataKind.Root); + this.trustedSet = new store_1.TrustedMetadataStore(data); + this.config = { ...config_1.defaultConfig, ...config }; + this.fetcher = + fetcher || + new fetcher_1.DefaultFetcher({ + timeout: this.config.fetchTimeout, + retry: this.config.fetchRetries ?? this.config.fetchRetry, + }); + } + // refresh and load the metadata before downloading the target + // refresh should be called once after the client is initialized + async refresh() { + // If forceCache is true, try to load the timestamp from local storage + // without fetching it from the remote. Otherwise, load the root and + // timestamp from the remote per the TUF spec. + if (this.forceCache) { + // If anything fails, load the root and timestamp from the remote. This + // should cover any situation where the local metadata is corrupted or + // expired. + try { + await this.loadTimestamp({ checkRemote: false }); + } + catch (error) { + await this.loadRoot(); + await this.loadTimestamp(); + } + } + else { + await this.loadRoot(); + await this.loadTimestamp(); + } + await this.loadSnapshot(); + await this.loadTargets(models_1.MetadataKind.Targets, models_1.MetadataKind.Root); + } + // Returns the TargetFile instance with information for the given target path. + // + // Implicitly calls refresh if it hasn't already been called. + async getTargetInfo(targetPath) { + if (!this.trustedSet.targets) { + await this.refresh(); + } + return this.preorderDepthFirstWalk(targetPath); + } + async downloadTarget(targetInfo, filePath, targetBaseUrl) { + const targetPath = filePath || this.generateTargetPath(targetInfo); + if (!targetBaseUrl) { + if (!this.targetBaseUrl) { + throw new error_1.ValueError('Target base URL not set'); + } + targetBaseUrl = this.targetBaseUrl; + } + let targetFilePath = targetInfo.path; + const consistentSnapshot = this.trustedSet.root.signed.consistentSnapshot; + if (consistentSnapshot && this.config.prefixTargetsWithHash) { + const hashes = Object.values(targetInfo.hashes); + const { dir, base } = path.parse(targetFilePath); + const filename = `${hashes[0]}.${base}`; + targetFilePath = dir ? 
`${dir}/${filename}` : filename; + } + const targetUrl = url.join(targetBaseUrl, targetFilePath); + // Client workflow 5.7.3: download target file + await this.fetcher.downloadFile(targetUrl, targetInfo.length, async (fileName) => { + // Verify hashes and length of downloaded file + await targetInfo.verify(fs.createReadStream(fileName)); + // Copy file to target path + log('WRITE %s', targetPath); + fs.copyFileSync(fileName, targetPath); + }); + return targetPath; + } + async findCachedTarget(targetInfo, filePath) { + if (!filePath) { + filePath = this.generateTargetPath(targetInfo); + } + try { + if (fs.existsSync(filePath)) { + await targetInfo.verify(fs.createReadStream(filePath)); + return filePath; + } + } + catch (error) { + return; // File not found + } + return; // File not found + } + loadLocalMetadata(fileName) { + const filePath = path.join(this.dir, `${fileName}.json`); + log('READ %s', filePath); + return fs.readFileSync(filePath); + } + // Sequentially load and persist on local disk every newer root metadata + // version available on the remote. + // Client workflow 5.3: update root role + async loadRoot() { + // Client workflow 5.3.2: version of trusted root metadata file + const rootVersion = this.trustedSet.root.signed.version; + const lowerBound = rootVersion + 1; + const upperBound = lowerBound + this.config.maxRootRotations; + for (let version = lowerBound; version < upperBound; version++) { + const rootUrl = url.join(this.metadataBaseUrl, `${version}.root.json`); + try { + // Client workflow 5.3.3: download new root metadata file + const bytesData = await this.fetcher.downloadBytes(rootUrl, this.config.rootMaxLength); + // Client workflow 5.3.4 - 5.4.7 + this.trustedSet.updateRoot(bytesData); + // Client workflow 5.3.8: persist root metadata file + this.persistMetadata(models_1.MetadataKind.Root, bytesData); + } + catch (error) { + if (error instanceof error_1.DownloadHTTPError) { + // 404/403 means current root is newest available + if ([403, 404].includes(error.statusCode)) { + break; + } + } + throw error; + } + } + } + // Load local and remote timestamp metadata. + // Client workflow 5.4: update timestamp role + async loadTimestamp({ checkRemote } = { checkRemote: true }) { + // Load local and remote timestamp metadata + try { + const data = this.loadLocalMetadata(models_1.MetadataKind.Timestamp); + this.trustedSet.updateTimestamp(data); + // If checkRemote is disabled, return here to avoid fetching the remote + // timestamp metadata. + if (!checkRemote) { + return; + } + } + catch (error) { + // continue + } + //Load from remote (whether local load succeeded or not) + const timestampUrl = url.join(this.metadataBaseUrl, 'timestamp.json'); + // Client workflow 5.4.1: download timestamp metadata file + const bytesData = await this.fetcher.downloadBytes(timestampUrl, this.config.timestampMaxLength); + try { + // Client workflow 5.4.2 - 5.4.4 + this.trustedSet.updateTimestamp(bytesData); + } + catch (error) { + // If new timestamp version is same as current, discardd the new one. + // This is normal and should NOT raise an error. + if (error instanceof error_1.EqualVersionError) { + return; + } + // Re-raise any other error + throw error; + } + // Client workflow 5.4.5: persist timestamp metadata + this.persistMetadata(models_1.MetadataKind.Timestamp, bytesData); + } + // Load local and remote snapshot metadata. 
+ // Client workflow 5.5: update snapshot role + async loadSnapshot() { + //Load local (and if needed remote) snapshot metadata + try { + const data = this.loadLocalMetadata(models_1.MetadataKind.Snapshot); + this.trustedSet.updateSnapshot(data, true); + } + catch (error) { + if (!this.trustedSet.timestamp) { + throw new ReferenceError('No timestamp metadata'); + } + const snapshotMeta = this.trustedSet.timestamp.signed.snapshotMeta; + const maxLength = snapshotMeta.length || this.config.snapshotMaxLength; + const version = this.trustedSet.root.signed.consistentSnapshot + ? snapshotMeta.version + : undefined; + const snapshotUrl = url.join(this.metadataBaseUrl, version ? `${version}.snapshot.json` : 'snapshot.json'); + try { + // Client workflow 5.5.1: download snapshot metadata file + const bytesData = await this.fetcher.downloadBytes(snapshotUrl, maxLength); + // Client workflow 5.5.2 - 5.5.6 + this.trustedSet.updateSnapshot(bytesData); + // Client workflow 5.5.7: persist snapshot metadata file + this.persistMetadata(models_1.MetadataKind.Snapshot, bytesData); + } + catch (error) { + throw new error_1.RuntimeError(`Unable to load snapshot metadata error ${error}`); + } + } + } + // Load local and remote targets metadata. + // Client workflow 5.6: update targets role + async loadTargets(role, parentRole) { + if (this.trustedSet.getRole(role)) { + return this.trustedSet.getRole(role); + } + try { + const buffer = this.loadLocalMetadata(role); + this.trustedSet.updateDelegatedTargets(buffer, role, parentRole); + } + catch (error) { + // Local 'role' does not exist or is invalid: update from remote + if (!this.trustedSet.snapshot) { + throw new ReferenceError('No snapshot metadata'); + } + const metaInfo = this.trustedSet.snapshot.signed.meta[`${role}.json`]; + // TODO: use length for fetching + const maxLength = metaInfo.length || this.config.targetsMaxLength; + const version = this.trustedSet.root.signed.consistentSnapshot + ? metaInfo.version + : undefined; + const encodedRole = encodeURIComponent(role); + const metadataUrl = url.join(this.metadataBaseUrl, version ? `${version}.${encodedRole}.json` : `${encodedRole}.json`); + try { + // Client workflow 5.6.1: download targets metadata file + const bytesData = await this.fetcher.downloadBytes(metadataUrl, maxLength); + // Client workflow 5.6.2 - 5.6.6 + this.trustedSet.updateDelegatedTargets(bytesData, role, parentRole); + // Client workflow 5.6.7: persist targets metadata file + this.persistMetadata(role, bytesData); + } + catch (error) { + throw new error_1.RuntimeError(`Unable to load targets error ${error}`); + } + } + return this.trustedSet.getRole(role); + } + async preorderDepthFirstWalk(targetPath) { + // Interrogates the tree of target delegations in order of appearance + // (which implicitly order trustworthiness), and returns the matching + // target found in the most trusted role. + // List of delegations to be interrogated. A (role, parent role) pair + // is needed to load and verify the delegated targets metadata. + const delegationsToVisit = [ + { + roleName: models_1.MetadataKind.Targets, + parentRoleName: models_1.MetadataKind.Root, + }, + ]; + const visitedRoleNames = new Set(); + // Client workflow 5.6.7: preorder depth-first traversal of the graph of + // target delegations + while (visitedRoleNames.size <= this.config.maxDelegations && + delegationsToVisit.length > 0) { + // Pop the role name from the top of the stack. 
+ const { roleName, parentRoleName } = delegationsToVisit.pop(); + // Skip any visited current role to prevent cycles. + // Client workflow 5.6.7.1: skip already-visited roles + if (visitedRoleNames.has(roleName)) { + continue; + } + // The metadata for 'role_name' must be downloaded/updated before + // its targets, delegations, and child roles can be inspected. + const targets = (await this.loadTargets(roleName, parentRoleName)) + ?.signed; + if (!targets) { + continue; + } + const target = targets.targets?.[targetPath]; + if (target) { + return target; + } + // After preorder check, add current role to set of visited roles. + visitedRoleNames.add(roleName); + if (targets.delegations) { + const childRolesToVisit = []; + // NOTE: This may be a slow operation if there are many delegated roles. + const rolesForTarget = targets.delegations.rolesForTarget(targetPath); + for (const { role: childName, terminating } of rolesForTarget) { + childRolesToVisit.push({ + roleName: childName, + parentRoleName: roleName, + }); + // Client workflow 5.6.7.2.1 + if (terminating) { + delegationsToVisit.splice(0); // empty the array + break; + } + } + childRolesToVisit.reverse(); + delegationsToVisit.push(...childRolesToVisit); + } + } + return; // no matching target found + } + generateTargetPath(targetInfo) { + if (!this.targetDir) { + throw new error_1.ValueError('Target directory not set'); + } + // URL encode target path + const filePath = encodeURIComponent(targetInfo.path); + return path.join(this.targetDir, filePath); + } + persistMetadata(metaDataName, bytesData) { + const encodedName = encodeURIComponent(metaDataName); + try { + const filePath = path.join(this.dir, `${encodedName}.json`); + log('WRITE %s', filePath); + fs.writeFileSync(filePath, bytesData.toString('utf8')); + } + catch (error) { + throw new error_1.PersistError(`Failed to persist metadata ${encodedName} error: ${error}`); + } + } +} +exports.Updater = Updater; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/utils/tmpfile.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/utils/tmpfile.js new file mode 100644 index 0000000000000..923eef6044bcc --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/utils/tmpfile.js @@ -0,0 +1,25 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.withTempFile = void 0; +const promises_1 = __importDefault(require("fs/promises")); +const os_1 = __importDefault(require("os")); +const path_1 = __importDefault(require("path")); +// Invokes the given handler with the path to a temporary file. The file +// is deleted after the handler returns. +const withTempFile = async (handler) => withTempDir(async (dir) => handler(path_1.default.join(dir, 'tempfile'))); +exports.withTempFile = withTempFile; +// Invokes the given handler with a temporary directory. The directory is +// deleted after the handler returns. 
+const withTempDir = async (handler) => { + const tmpDir = await promises_1.default.realpath(os_1.default.tmpdir()); + const dir = await promises_1.default.mkdtemp(tmpDir + path_1.default.sep); + try { + return await handler(dir); + } + finally { + await promises_1.default.rm(dir, { force: true, recursive: true, maxRetries: 3 }); + } +}; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/utils/url.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/utils/url.js new file mode 100644 index 0000000000000..359d1f3ef385b --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/utils/url.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.join = join; +const url_1 = require("url"); +function join(base, path) { + return new url_1.URL(ensureTrailingSlash(base) + removeLeadingSlash(path)).toString(); +} +function ensureTrailingSlash(path) { + return path.endsWith('/') ? path : path + '/'; +} +function removeLeadingSlash(path) { + return path.startsWith('/') ? path.slice(1) : path; +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/package.json new file mode 100644 index 0000000000000..e79a3d45f3f06 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/package.json @@ -0,0 +1,43 @@ +{ + "name": "tuf-js", + "version": "3.0.1", + "description": "JavaScript implementation of The Update Framework (TUF)", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "build": "tsc --build", + "clean": "rm -rf dist && rm tsconfig.tsbuildinfo", + "test": "jest" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/theupdateframework/tuf-js.git" + }, + "files": [ + "dist" + ], + "keywords": [ + "tuf", + "security", + "update" + ], + "author": "bdehamer@github.com", + "license": "MIT", + "bugs": { + "url": "https://github.com/theupdateframework/tuf-js/issues" + }, + "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/client#readme", + "devDependencies": { + "@tufjs/repo-mock": "3.0.1", + "@types/debug": "^4.1.12", + "@types/make-fetch-happen": "^10.0.4" + }, + "dependencies": { + "@tufjs/models": "3.0.1", + "debug": "^4.3.6", + "make-fetch-happen": "^14.0.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/package.json b/node_modules/@npmcli/metavuln-calculator/package.json index d4c3cf54d83ea..df0b8f2f4faf1 100644 --- a/node_modules/@npmcli/metavuln-calculator/package.json +++ b/node_modules/@npmcli/metavuln-calculator/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/metavuln-calculator", - "version": "8.0.0", + "version": "8.0.1", "main": "lib/index.js", "files": [ "bin/", @@ -41,7 +41,7 @@ "dependencies": { "cacache": "^19.0.0", "json-parse-even-better-errors": "^4.0.0", - "pacote": "^19.0.0", + "pacote": "^20.0.0", "proc-log": "^5.0.0", "semver": "^7.3.5" }, diff --git a/package-lock.json b/package-lock.json index a72ece9382ccd..0bc0435f3afb8 100644 --- a/package-lock.json +++ b/package-lock.json @@ -2407,14 +2407,14 @@ } }, "node_modules/@npmcli/metavuln-calculator": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/@npmcli/metavuln-calculator/-/metavuln-calculator-8.0.0.tgz", - "integrity": "sha512-zR2TGfhR8fH1u4VRz9fuC7r1nI9dweViRDsFnMH8J89OA90lJNwF6idTttEzYSWaOTW4NVoAIB6+ujV+/wI+kg==", + "version": "8.0.1", + 
"resolved": "https://registry.npmjs.org/@npmcli/metavuln-calculator/-/metavuln-calculator-8.0.1.tgz", + "integrity": "sha512-WXlJx9cz3CfHSt9W9Opi1PTFc4WZLFomm5O8wekxQZmkyljrBRwATwDxfC9iOXJwYVmfiW1C1dUe0W2aN0UrSg==", "license": "ISC", "dependencies": { "cacache": "^19.0.0", "json-parse-even-better-errors": "^4.0.0", - "pacote": "^19.0.0", + "pacote": "^20.0.0", "proc-log": "^5.0.0", "semver": "^7.3.5" }, @@ -2422,6 +2422,146 @@ "node": "^18.17.0 || >=20.5.0" } }, + "node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-3.0.0.tgz", + "integrity": "sha512-XDUYX56iMPAn/cdgh/DTJxz5RWmqKV4pwvUAEKEWJl+HzKdCd/24wUa9JYNMlDSCb7SUHAdtksxYX779Nne/Zg==", + "license": "Apache-2.0", + "dependencies": { + "@sigstore/protobuf-specs": "^0.3.2" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@sigstore/core/-/core-2.0.0.tgz", + "integrity": "sha512-nYxaSb/MtlSI+JWcwTHQxyNmWeWrUXJJ/G4liLrGG7+tS4vAz6LF3xRXqLH6wPIVUoZQel2Fs4ddLx4NCpiIYg==", + "license": "Apache-2.0", + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-3.0.0.tgz", + "integrity": "sha512-UjhDMQOkyDoktpXoc5YPJpJK6IooF2gayAr5LvXI4EL7O0vd58okgfRcxuaH+YTdhvb5aa1Q9f+WJ0c2sVuYIw==", + "license": "Apache-2.0", + "dependencies": { + "@sigstore/bundle": "^3.0.0", + "@sigstore/core": "^2.0.0", + "@sigstore/protobuf-specs": "^0.3.2", + "make-fetch-happen": "^14.0.1", + "proc-log": "^5.0.0", + "promise-retry": "^2.0.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-3.0.0.tgz", + "integrity": "sha512-9Xxy/8U5OFJu7s+OsHzI96IX/OzjF/zj0BSSaWhgJgTqtlBhQIV2xdrQI5qxLD7+CWWDepadnXAxzaZ3u9cvRw==", + "license": "Apache-2.0", + "dependencies": { + "@sigstore/protobuf-specs": "^0.3.2", + "tuf-js": "^3.0.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@sigstore/verify/-/verify-2.0.0.tgz", + "integrity": "sha512-Ggtq2GsJuxFNUvQzLoXqRwS4ceRfLAJnrIHUDrzAD0GgnOhwujJkKkxM/s5Bako07c3WtAs/sZo5PJq7VHjeDg==", + "license": "Apache-2.0", + "dependencies": { + "@sigstore/bundle": "^3.0.0", + "@sigstore/core": "^2.0.0", + "@sigstore/protobuf-specs": "^0.3.2" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-3.0.1.tgz", + "integrity": "sha512-UUYHISyhCU3ZgN8yaear3cGATHb3SMuKHsQ/nVbHXcmnBf+LzQ/cQfhNG+rfaSHgqGKNEm2cOCLVLELStUQ1JA==", + "license": "MIT", + "dependencies": { + "@tufjs/canonical-json": "2.0.0", + "minimatch": "^9.0.5" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/@npmcli/metavuln-calculator/node_modules/pacote": { + "version": "20.0.0", + "resolved": "https://registry.npmjs.org/pacote/-/pacote-20.0.0.tgz", + "integrity": "sha512-pRjC5UFwZCgx9kUFDVM9YEahv4guZ1nSLqwmWiLUnDbGsjs+U5w7z6Uc8HNR1a6x8qnu5y9xtGE6D1uAuYz+0A==", + 
"license": "ISC", + "dependencies": { + "@npmcli/git": "^6.0.0", + "@npmcli/installed-package-contents": "^3.0.0", + "@npmcli/package-json": "^6.0.0", + "@npmcli/promise-spawn": "^8.0.0", + "@npmcli/run-script": "^9.0.0", + "cacache": "^19.0.0", + "fs-minipass": "^3.0.0", + "minipass": "^7.0.2", + "npm-package-arg": "^12.0.0", + "npm-packlist": "^9.0.0", + "npm-pick-manifest": "^10.0.0", + "npm-registry-fetch": "^18.0.0", + "proc-log": "^5.0.0", + "promise-retry": "^2.0.1", + "sigstore": "^3.0.0", + "ssri": "^12.0.0", + "tar": "^6.1.11" + }, + "bin": { + "pacote": "bin/index.js" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/@npmcli/metavuln-calculator/node_modules/sigstore": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-3.0.0.tgz", + "integrity": "sha512-PHMifhh3EN4loMcHCz6l3v/luzgT3za+9f8subGgeMNjbJjzH4Ij/YoX3Gvu+kaouJRIlVdTHHCREADYf+ZteA==", + "license": "Apache-2.0", + "dependencies": { + "@sigstore/bundle": "^3.0.0", + "@sigstore/core": "^2.0.0", + "@sigstore/protobuf-specs": "^0.3.2", + "@sigstore/sign": "^3.0.0", + "@sigstore/tuf": "^3.0.0", + "@sigstore/verify": "^2.0.0" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-3.0.1.tgz", + "integrity": "sha512-+68OP1ZzSF84rTckf3FA95vJ1Zlx/uaXyiiKyPd1pA4rZNkpEvDAKmsu1xUSmbF/chCRYgZ6UZkDwC7PmzmAyA==", + "license": "MIT", + "dependencies": { + "@tufjs/models": "3.0.1", + "debug": "^4.3.6", + "make-fetch-happen": "^14.0.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, "node_modules/@npmcli/mock-globals": { "resolved": "mock-globals", "link": true From 47f445fbfaa5298b7121394455ac834909d48c37 Mon Sep 17 00:00:00 2001 From: Gar Date: Wed, 20 Nov 2024 09:11:37 -0800 Subject: [PATCH 08/16] deps: cross-spawn@7.0.6 --- node_modules/cross-spawn/lib/enoent.js | 2 +- node_modules/cross-spawn/lib/util/escape.js | 6 ++++-- node_modules/cross-spawn/package.json | 4 ++-- package-lock.json | 6 +++--- 4 files changed, 10 insertions(+), 8 deletions(-) diff --git a/node_modules/cross-spawn/lib/enoent.js b/node_modules/cross-spawn/lib/enoent.js index 14df9b623d0a2..da33471369c23 100644 --- a/node_modules/cross-spawn/lib/enoent.js +++ b/node_modules/cross-spawn/lib/enoent.js @@ -24,7 +24,7 @@ function hookChildProcess(cp, parsed) { // the command exists and emit an "error" instead // See https://github.com/IndigoUnited/node-cross-spawn/issues/16 if (name === 'exit') { - const err = verifyENOENT(arg1, parsed, 'spawn'); + const err = verifyENOENT(arg1, parsed); if (err) { return originalEmit.call(cp, 'error', err); diff --git a/node_modules/cross-spawn/lib/util/escape.js b/node_modules/cross-spawn/lib/util/escape.js index b0bb84c3a1409..7bf2905cd035a 100644 --- a/node_modules/cross-spawn/lib/util/escape.js +++ b/node_modules/cross-spawn/lib/util/escape.js @@ -15,15 +15,17 @@ function escapeArgument(arg, doubleEscapeMetaChars) { arg = `${arg}`; // Algorithm below is based on https://qntm.org/cmd + // It's slightly altered to disable JS backtracking to avoid hanging on specially crafted input + // Please see https://github.com/moxystudio/node-cross-spawn/pull/160 for more information // Sequence of backslashes followed by a double quote: // double up all the backslashes and escape the double quote - arg = arg.replace(/(\\*)"/g, '$1$1\\"'); + arg = arg.replace(/(?=(\\+?)?)\1"/g, '$1$1\\"'); // Sequence of backslashes 
followed by the end of the string // (which will become a double quote later): // double up all the backslashes - arg = arg.replace(/(\\*)$/, '$1$1'); + arg = arg.replace(/(?=(\\+?)?)\1$/, '$1$1'); // All other backslashes occur literally diff --git a/node_modules/cross-spawn/package.json b/node_modules/cross-spawn/package.json index 232ff97e04b21..24b2eb4c9900c 100644 --- a/node_modules/cross-spawn/package.json +++ b/node_modules/cross-spawn/package.json @@ -1,6 +1,6 @@ { "name": "cross-spawn", - "version": "7.0.3", + "version": "7.0.6", "description": "Cross platform child_process#spawn and child_process#spawnSync", "keywords": [ "spawn", @@ -65,7 +65,7 @@ "lint-staged": "^9.2.5", "mkdirp": "^0.5.1", "rimraf": "^3.0.0", - "standard-version": "^7.0.0" + "standard-version": "^9.5.0" }, "engines": { "node": ">= 8" diff --git a/package-lock.json b/package-lock.json index 0bc0435f3afb8..9e89b33734061 100644 --- a/package-lock.json +++ b/package-lock.json @@ -5569,9 +5569,9 @@ } }, "node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", "inBundle": true, "license": "MIT", "dependencies": { From 5b10d93583825a77e8b1378db1e4be80b4e1ace0 Mon Sep 17 00:00:00 2001 From: Gar Date: Wed, 20 Nov 2024 09:12:01 -0800 Subject: [PATCH 09/16] deps: debug@4.3.7 --- node_modules/.gitignore | 3 - node_modules/debug/node_modules/ms/index.js | 162 ------------------ node_modules/debug/node_modules/ms/license.md | 21 --- .../debug/node_modules/ms/package.json | 37 ---- node_modules/debug/package.json | 4 +- package-lock.json | 15 +- 6 files changed, 6 insertions(+), 236 deletions(-) delete mode 100644 node_modules/debug/node_modules/ms/index.js delete mode 100644 node_modules/debug/node_modules/ms/license.md delete mode 100644 node_modules/debug/node_modules/ms/package.json diff --git a/node_modules/.gitignore b/node_modules/.gitignore index 1a0706de3b8f3..6f2a5e2c034e8 100644 --- a/node_modules/.gitignore +++ b/node_modules/.gitignore @@ -108,9 +108,6 @@ !/cross-spawn/node_modules/which !/cssesc !/debug -!/debug/node_modules/ -/debug/node_modules/* -!/debug/node_modules/ms !/diff !/eastasianwidth !/emoji-regex diff --git a/node_modules/debug/node_modules/ms/index.js b/node_modules/debug/node_modules/ms/index.js deleted file mode 100644 index c4498bcc21258..0000000000000 --- a/node_modules/debug/node_modules/ms/index.js +++ /dev/null @@ -1,162 +0,0 @@ -/** - * Helpers. - */ - -var s = 1000; -var m = s * 60; -var h = m * 60; -var d = h * 24; -var w = d * 7; -var y = d * 365.25; - -/** - * Parse or format the given `val`. - * - * Options: - * - * - `long` verbose formatting [false] - * - * @param {String|Number} val - * @param {Object} [options] - * @throws {Error} throw an error if val is not a non-empty string or a number - * @return {String|Number} - * @api public - */ - -module.exports = function(val, options) { - options = options || {}; - var type = typeof val; - if (type === 'string' && val.length > 0) { - return parse(val); - } else if (type === 'number' && isFinite(val)) { - return options.long ? fmtLong(val) : fmtShort(val); - } - throw new Error( - 'val is not a non-empty string or a valid number. 
val=' + - JSON.stringify(val) - ); -}; - -/** - * Parse the given `str` and return milliseconds. - * - * @param {String} str - * @return {Number} - * @api private - */ - -function parse(str) { - str = String(str); - if (str.length > 100) { - return; - } - var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( - str - ); - if (!match) { - return; - } - var n = parseFloat(match[1]); - var type = (match[2] || 'ms').toLowerCase(); - switch (type) { - case 'years': - case 'year': - case 'yrs': - case 'yr': - case 'y': - return n * y; - case 'weeks': - case 'week': - case 'w': - return n * w; - case 'days': - case 'day': - case 'd': - return n * d; - case 'hours': - case 'hour': - case 'hrs': - case 'hr': - case 'h': - return n * h; - case 'minutes': - case 'minute': - case 'mins': - case 'min': - case 'm': - return n * m; - case 'seconds': - case 'second': - case 'secs': - case 'sec': - case 's': - return n * s; - case 'milliseconds': - case 'millisecond': - case 'msecs': - case 'msec': - case 'ms': - return n; - default: - return undefined; - } -} - -/** - * Short format for `ms`. - * - * @param {Number} ms - * @return {String} - * @api private - */ - -function fmtShort(ms) { - var msAbs = Math.abs(ms); - if (msAbs >= d) { - return Math.round(ms / d) + 'd'; - } - if (msAbs >= h) { - return Math.round(ms / h) + 'h'; - } - if (msAbs >= m) { - return Math.round(ms / m) + 'm'; - } - if (msAbs >= s) { - return Math.round(ms / s) + 's'; - } - return ms + 'ms'; -} - -/** - * Long format for `ms`. - * - * @param {Number} ms - * @return {String} - * @api private - */ - -function fmtLong(ms) { - var msAbs = Math.abs(ms); - if (msAbs >= d) { - return plural(ms, msAbs, d, 'day'); - } - if (msAbs >= h) { - return plural(ms, msAbs, h, 'hour'); - } - if (msAbs >= m) { - return plural(ms, msAbs, m, 'minute'); - } - if (msAbs >= s) { - return plural(ms, msAbs, s, 'second'); - } - return ms + ' ms'; -} - -/** - * Pluralization helper. - */ - -function plural(ms, msAbs, n, name) { - var isPlural = msAbs >= n * 1.5; - return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : ''); -} diff --git a/node_modules/debug/node_modules/ms/license.md b/node_modules/debug/node_modules/ms/license.md deleted file mode 100644 index 69b61253a3892..0000000000000 --- a/node_modules/debug/node_modules/ms/license.md +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2016 Zeit, Inc. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/node_modules/debug/node_modules/ms/package.json b/node_modules/debug/node_modules/ms/package.json deleted file mode 100644 index eea666e1fb03d..0000000000000 --- a/node_modules/debug/node_modules/ms/package.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "name": "ms", - "version": "2.1.2", - "description": "Tiny millisecond conversion utility", - "repository": "zeit/ms", - "main": "./index", - "files": [ - "index.js" - ], - "scripts": { - "precommit": "lint-staged", - "lint": "eslint lib/* bin/*", - "test": "mocha tests.js" - }, - "eslintConfig": { - "extends": "eslint:recommended", - "env": { - "node": true, - "es6": true - } - }, - "lint-staged": { - "*.js": [ - "npm run lint", - "prettier --single-quote --write", - "git add" - ] - }, - "license": "MIT", - "devDependencies": { - "eslint": "4.12.1", - "expect.js": "0.3.1", - "husky": "0.14.3", - "lint-staged": "5.0.0", - "mocha": "4.0.1" - } -} diff --git a/node_modules/debug/package.json b/node_modules/debug/package.json index 8eea05520554e..2f782eb9aef45 100644 --- a/node_modules/debug/package.json +++ b/node_modules/debug/package.json @@ -1,6 +1,6 @@ { "name": "debug", - "version": "4.3.6", + "version": "4.3.7", "repository": { "type": "git", "url": "git://github.com/debug-js/debug.git" @@ -31,7 +31,7 @@ "test:coverage": "cat ./coverage/lcov.info | coveralls" }, "dependencies": { - "ms": "2.1.2" + "ms": "^2.1.3" }, "devDependencies": { "brfs": "^2.0.1", diff --git a/package-lock.json b/package-lock.json index 9e89b33734061..ae23612aeaef8 100644 --- a/package-lock.json +++ b/package-lock.json @@ -5793,13 +5793,13 @@ } }, "node_modules/debug": { - "version": "4.3.6", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz", - "integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==", + "version": "4.3.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz", + "integrity": "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==", "inBundle": true, "license": "MIT", "dependencies": { - "ms": "2.1.2" + "ms": "^2.1.3" }, "engines": { "node": ">=6.0" @@ -5810,13 +5810,6 @@ } } }, - "node_modules/debug/node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "inBundle": true, - "license": "MIT" - }, "node_modules/decamelize": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", From c9e915de7d556e359e64e902992a6081a6484123 Mon Sep 17 00:00:00 2001 From: Gar Date: Wed, 20 Nov 2024 09:12:24 -0800 Subject: [PATCH 10/16] deps: negotiator@0.6.4 --- node_modules/negotiator/HISTORY.md | 5 ++++ node_modules/negotiator/index.js | 8 +++--- node_modules/negotiator/lib/encoding.js | 31 ++++++++++++++++++++---- node_modules/negotiator/lib/mediaType.js | 6 ++--- node_modules/negotiator/package.json | 2 +- package-lock.json | 6 ++--- 6 files changed, 42 insertions(+), 16 deletions(-) diff --git a/node_modules/negotiator/HISTORY.md b/node_modules/negotiator/HISTORY.md index a9a544914c43b..e1929aba8e22c 100644 --- a/node_modules/negotiator/HISTORY.md +++ b/node_modules/negotiator/HISTORY.md @@ -1,3 +1,8 @@ +unreleased +================== + + * Added an option preferred encodings array #59 + 0.6.3 / 2022-01-22 ================== diff --git a/node_modules/negotiator/index.js b/node_modules/negotiator/index.js index 
4788264b16c9f..7df0b0a531815 100644 --- a/node_modules/negotiator/index.js +++ b/node_modules/negotiator/index.js @@ -44,13 +44,13 @@ Negotiator.prototype.charsets = function charsets(available) { return preferredCharsets(this.request.headers['accept-charset'], available); }; -Negotiator.prototype.encoding = function encoding(available) { - var set = this.encodings(available); +Negotiator.prototype.encoding = function encoding(available, preferred) { + var set = this.encodings(available, preferred); return set && set[0]; }; -Negotiator.prototype.encodings = function encodings(available) { - return preferredEncodings(this.request.headers['accept-encoding'], available); +Negotiator.prototype.encodings = function encodings(available, preferred) { + return preferredEncodings(this.request.headers['accept-encoding'], available, preferred); }; Negotiator.prototype.language = function language(available) { diff --git a/node_modules/negotiator/lib/encoding.js b/node_modules/negotiator/lib/encoding.js index 8432cd77b8a96..9ebb633d67743 100644 --- a/node_modules/negotiator/lib/encoding.js +++ b/node_modules/negotiator/lib/encoding.js @@ -96,7 +96,7 @@ function parseEncoding(str, i) { */ function getEncodingPriority(encoding, accepted, index) { - var priority = {o: -1, q: 0, s: 0}; + var priority = {encoding: encoding, o: -1, q: 0, s: 0}; for (var i = 0; i < accepted.length; i++) { var spec = specify(encoding, accepted[i], index); @@ -123,6 +123,7 @@ function specify(encoding, spec, index) { } return { + encoding: encoding, i: index, o: spec.i, q: spec.q, @@ -135,14 +136,34 @@ function specify(encoding, spec, index) { * @public */ -function preferredEncodings(accept, provided) { +function preferredEncodings(accept, provided, preferred) { var accepts = parseAcceptEncoding(accept || ''); + var comparator = preferred ? function comparator (a, b) { + if (a.q !== b.q) { + return b.q - a.q // higher quality first + } + + var aPreferred = preferred.indexOf(a.encoding) + var bPreferred = preferred.indexOf(b.encoding) + + if (aPreferred === -1 && bPreferred === -1) { + // consider the original specifity/order + return (b.s - a.s) || (a.o - b.o) || (a.i - b.i) + } + + if (aPreferred !== -1 && bPreferred !== -1) { + return aPreferred - bPreferred // consider the preferred order + } + + return aPreferred === -1 ? 1 : -1 // preferred first + } : compareSpecs; + if (!provided) { // sorted list of all encodings return accepts .filter(isQuality) - .sort(compareSpecs) + .sort(comparator) .map(getFullEncoding); } @@ -151,7 +172,7 @@ function preferredEncodings(accept, provided) { }); // sorted list of accepted encodings - return priorities.filter(isQuality).sort(compareSpecs).map(function getEncoding(priority) { + return priorities.filter(isQuality).sort(comparator).map(function getEncoding(priority) { return provided[priorities.indexOf(priority)]; }); } @@ -162,7 +183,7 @@ function preferredEncodings(accept, provided) { */ function compareSpecs(a, b) { - return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0; + return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i); } /** diff --git a/node_modules/negotiator/lib/mediaType.js b/node_modules/negotiator/lib/mediaType.js index 67309dd75f1b6..8e402ea88394c 100644 --- a/node_modules/negotiator/lib/mediaType.js +++ b/node_modules/negotiator/lib/mediaType.js @@ -69,7 +69,7 @@ function parseMediaType(str, i) { // get the value, unwrapping quotes var value = val && val[0] === '"' && val[val.length - 1] === '"' - ? val.substr(1, val.length - 2) + ? 
val.slice(1, -1) : val; if (key === 'q') { @@ -238,8 +238,8 @@ function splitKeyValuePair(str) { if (index === -1) { key = str; } else { - key = str.substr(0, index); - val = str.substr(index + 1); + key = str.slice(0, index); + val = str.slice(index + 1); } return [key, val]; diff --git a/node_modules/negotiator/package.json b/node_modules/negotiator/package.json index 297635f6d3417..19b0a8a6ef604 100644 --- a/node_modules/negotiator/package.json +++ b/node_modules/negotiator/package.json @@ -1,7 +1,7 @@ { "name": "negotiator", "description": "HTTP content negotiation", - "version": "0.6.3", + "version": "0.6.4", "contributors": [ "Douglas Christopher Wilson ", "Federico Romero ", diff --git a/package-lock.json b/package-lock.json index ae23612aeaef8..24c6c9478ab68 100644 --- a/package-lock.json +++ b/package-lock.json @@ -11112,9 +11112,9 @@ } }, "node_modules/negotiator": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", - "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", + "version": "0.6.4", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.4.tgz", + "integrity": "sha512-myRT3DiWPHqho5PrJaIRyaMv2kgYf0mUVgBNOYMuCH5Ki1yEiQaf/ZJuQ62nvpc44wL5WDbTX7yGJi1Neevw8w==", "inBundle": true, "license": "MIT", "engines": { From 44263b1b56eb318dfb4f52f1073df2ae7c710afa Mon Sep 17 00:00:00 2001 From: Gar Date: Wed, 20 Nov 2024 09:12:58 -0800 Subject: [PATCH 11/16] deps: package-json-from-dist@1.0.1 --- .../package-json-from-dist/dist/commonjs/index.js | 12 +++++++++++- .../package-json-from-dist/dist/esm/index.js | 12 +++++++++++- node_modules/package-json-from-dist/package.json | 4 ++-- package-lock.json | 6 +++--- 4 files changed, 27 insertions(+), 7 deletions(-) diff --git a/node_modules/package-json-from-dist/dist/commonjs/index.js b/node_modules/package-json-from-dist/dist/commonjs/index.js index 5cff210d855cb..b966ac9fef535 100644 --- a/node_modules/package-json-from-dist/dist/commonjs/index.js +++ b/node_modules/package-json-from-dist/dist/commonjs/index.js @@ -5,6 +5,8 @@ const node_fs_1 = require("node:fs"); const node_path_1 = require("node:path"); const node_url_1 = require("node:url"); const NM = `${node_path_1.sep}node_modules${node_path_1.sep}`; +const STORE = `.store${node_path_1.sep}`; +const PKG = `${node_path_1.sep}package${node_path_1.sep}`; const DIST = `${node_path_1.sep}dist${node_path_1.sep}`; /** * Find the package.json file, either from a TypeScript file somewhere not @@ -59,8 +61,16 @@ const findPackageJson = (from, pathFromSrc = '../package.json') => { // inside of node_modules. find the dist directly under package name. const nm = __dirname.substring(0, nms + NM.length); const pkgDir = __dirname.substring(nms + NM.length); + // affordance for yarn berry, which puts package contents in + // '.../node_modules/.store/${id}-${hash}/package/...' + if (pkgDir.startsWith(STORE)) { + const pkg = pkgDir.indexOf(PKG, STORE.length); + if (pkg) { + return (0, node_path_1.resolve)(nm, pkgDir.substring(0, pkg + PKG.length), 'package.json'); + } + } const pkgName = pkgDir.startsWith('@') ? 
- pkgDir.split(node_path_1.sep).slice(0, 2).join(node_path_1.sep) + pkgDir.split(node_path_1.sep, 2).join(node_path_1.sep) : String(pkgDir.split(node_path_1.sep)[0]); return (0, node_path_1.resolve)(nm, pkgName, 'package.json'); } diff --git a/node_modules/package-json-from-dist/dist/esm/index.js b/node_modules/package-json-from-dist/dist/esm/index.js index 0627645f9c35a..426ad3c2d1859 100644 --- a/node_modules/package-json-from-dist/dist/esm/index.js +++ b/node_modules/package-json-from-dist/dist/esm/index.js @@ -2,6 +2,8 @@ import { readFileSync } from 'node:fs'; import { dirname, resolve, sep } from 'node:path'; import { fileURLToPath } from 'node:url'; const NM = `${sep}node_modules${sep}`; +const STORE = `.store${sep}`; +const PKG = `${sep}package${sep}`; const DIST = `${sep}dist${sep}`; /** * Find the package.json file, either from a TypeScript file somewhere not @@ -56,8 +58,16 @@ export const findPackageJson = (from, pathFromSrc = '../package.json') => { // inside of node_modules. find the dist directly under package name. const nm = __dirname.substring(0, nms + NM.length); const pkgDir = __dirname.substring(nms + NM.length); + // affordance for yarn berry, which puts package contents in + // '.../node_modules/.store/${id}-${hash}/package/...' + if (pkgDir.startsWith(STORE)) { + const pkg = pkgDir.indexOf(PKG, STORE.length); + if (pkg) { + return resolve(nm, pkgDir.substring(0, pkg + PKG.length), 'package.json'); + } + } const pkgName = pkgDir.startsWith('@') ? - pkgDir.split(sep).slice(0, 2).join(sep) + pkgDir.split(sep, 2).join(sep) : String(pkgDir.split(sep)[0]); return resolve(nm, pkgName, 'package.json'); } diff --git a/node_modules/package-json-from-dist/package.json b/node_modules/package-json-from-dist/package.json index 2d5526e87b7fa..a2d03c3269d72 100644 --- a/node_modules/package-json-from-dist/package.json +++ b/node_modules/package-json-from-dist/package.json @@ -1,6 +1,6 @@ { "name": "package-json-from-dist", - "version": "1.0.0", + "version": "1.0.1", "description": "Load the local package.json from either src or dist folder", "main": "./dist/commonjs/index.js", "exports": { @@ -28,7 +28,7 @@ "presnap": "npm run prepare", "test": "tap", "snap": "tap", - "format": "prettier --write . --loglevel warn --ignore-path ../../.prettierignore --cache", + "format": "prettier --write . --log-level warn", "typedoc": "typedoc" }, "author": "Isaac Z. 
Schlueter (https://izs.me)", diff --git a/package-lock.json b/package-lock.json index 24c6c9478ab68..2b2d6ca1feee4 100644 --- a/package-lock.json +++ b/package-lock.json @@ -12205,9 +12205,9 @@ } }, "node_modules/package-json-from-dist": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.0.tgz", - "integrity": "sha512-dATvCeZN/8wQsGywez1mzHtTlP22H8OEfPrVMLNr4/eGa+ijtLn/6M5f0dY8UKNrC2O9UCU6SSoG3qRKnt7STw==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", + "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==", "inBundle": true, "license": "BlueOak-1.0.0" }, From 529b81c3d3378aba458a7e7158af020110f1443c Mon Sep 17 00:00:00 2001 From: Gar Date: Wed, 20 Nov 2024 09:13:18 -0800 Subject: [PATCH 12/16] deps: promise-call-limit@3.0.2 --- .../promise-call-limit/dist/commonjs/index.js | 4 ++-- .../promise-call-limit/dist/esm/index.js | 4 ++-- node_modules/promise-call-limit/package.json | 18 ++++++++++-------- package-lock.json | 6 +++--- 4 files changed, 17 insertions(+), 15 deletions(-) diff --git a/node_modules/promise-call-limit/dist/commonjs/index.js b/node_modules/promise-call-limit/dist/commonjs/index.js index 6ce5cfcef9559..b32a85bb11aa3 100644 --- a/node_modules/promise-call-limit/dist/commonjs/index.js +++ b/node_modules/promise-call-limit/dist/commonjs/index.js @@ -29,8 +29,8 @@ const os = __importStar(require("node:os")); // cpus() cpus() can return an empty list if /proc is not mounted, use 1 in // this case /* c8 ignore start */ -const defLimit = 'availableParallelism' in os - ? Math.max(1, os.availableParallelism() - 1) +const defLimit = 'availableParallelism' in os ? + Math.max(1, os.availableParallelism() - 1) : Math.max(1, os.cpus().length - 1); const callLimit = (queue, { limit = defLimit, rejectLate } = {}) => new Promise((res, rej) => { let active = 0; diff --git a/node_modules/promise-call-limit/dist/esm/index.js b/node_modules/promise-call-limit/dist/esm/index.js index 030099929b348..fe709db7fc04c 100644 --- a/node_modules/promise-call-limit/dist/esm/index.js +++ b/node_modules/promise-call-limit/dist/esm/index.js @@ -3,8 +3,8 @@ import * as os from 'node:os'; // cpus() cpus() can return an empty list if /proc is not mounted, use 1 in // this case /* c8 ignore start */ -const defLimit = 'availableParallelism' in os - ? Math.max(1, os.availableParallelism() - 1) +const defLimit = 'availableParallelism' in os ? + Math.max(1, os.availableParallelism() - 1) : Math.max(1, os.cpus().length - 1); export const callLimit = (queue, { limit = defLimit, rejectLate } = {}) => new Promise((res, rej) => { let active = 0; diff --git a/node_modules/promise-call-limit/package.json b/node_modules/promise-call-limit/package.json index a3aa548d6538a..ab14595366e22 100644 --- a/node_modules/promise-call-limit/package.json +++ b/node_modules/promise-call-limit/package.json @@ -1,6 +1,6 @@ { "name": "promise-call-limit", - "version": "3.0.1", + "version": "3.0.2", "files": [ "dist" ], @@ -18,16 +18,17 @@ "test": "tap", "preversion": "npm test", "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags" + "prepublishOnly": "git push origin --follow-tags", + "format": "prettier --write . --log-level warn --cache" }, "devDependencies": { - "prettier": "^3.2.1", - "tap": "^18.6.1", - "tshy": "^1.8.2", - "format": "prettier --write . 
--loglevel warn --ignore-path ../../.prettierignore --cache", - "typedoc": "typedoc" + "prettier": "^3.3.3", + "tap": "^21.0.1", + "tshy": "^3.0.2", + "typedoc": "^0.26.6" }, "prettier": { + "experimentalTernaries": true, "semi": false, "printWidth": 70, "tabWidth": 2, @@ -62,5 +63,6 @@ }, "main": "./dist/commonjs/index.js", "types": "./dist/commonjs/index.d.ts", - "type": "module" + "type": "module", + "module": "./dist/esm/index.js" } diff --git a/package-lock.json b/package-lock.json index 2b2d6ca1feee4..55d0200857238 100644 --- a/package-lock.json +++ b/package-lock.json @@ -12678,9 +12678,9 @@ } }, "node_modules/promise-call-limit": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/promise-call-limit/-/promise-call-limit-3.0.1.tgz", - "integrity": "sha512-utl+0x8gIDasV5X+PI5qWEPqH6fJS0pFtQ/4gZ95xfEFb/89dmh+/b895TbFDBLiafBvxD/PGTKfvxl4kH/pQg==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/promise-call-limit/-/promise-call-limit-3.0.2.tgz", + "integrity": "sha512-mRPQO2T1QQVw11E7+UdCJu7S61eJVWknzml9sC1heAdj1jxl0fWMBypIt9ZOcLFf8FkG995ZD7RnVk7HH72fZw==", "license": "ISC", "funding": { "url": "https://github.com/sponsors/isaacs" From 4f0aa4775a11f11200c3f110b30da4ab57a978b9 Mon Sep 17 00:00:00 2001 From: Gar Date: Wed, 20 Nov 2024 09:13:45 -0800 Subject: [PATCH 13/16] deps: spdx-license-ids@3.0.20 --- node_modules/spdx-license-ids/deprecated.json | 1 + node_modules/spdx-license-ids/index.json | 8 +++++++- node_modules/spdx-license-ids/package.json | 2 +- package-lock.json | 6 +++--- 4 files changed, 12 insertions(+), 5 deletions(-) diff --git a/node_modules/spdx-license-ids/deprecated.json b/node_modules/spdx-license-ids/deprecated.json index 278531e40c613..4f70a14c7469d 100644 --- a/node_modules/spdx-license-ids/deprecated.json +++ b/node_modules/spdx-license-ids/deprecated.json @@ -19,6 +19,7 @@ "LGPL-2.0", "LGPL-2.1", "LGPL-3.0", + "Net-SNMP", "Nunit", "StandardML-NJ", "bzip2-1.0.5", diff --git a/node_modules/spdx-license-ids/index.json b/node_modules/spdx-license-ids/index.json index c7686a710d61d..f43d5016bd95a 100644 --- a/node_modules/spdx-license-ids/index.json +++ b/node_modules/spdx-license-ids/index.json @@ -197,6 +197,8 @@ "DRL-1.0", "DRL-1.1", "DSDP", + "DocBook-Schema", + "DocBook-XML", "Dotseqn", "ECL-1.0", "ECL-2.0", @@ -260,6 +262,7 @@ "Glulxe", "Graphics-Gems", "Gutmann", + "HIDAPI", "HP-1986", "HP-1989", "HPND", @@ -270,6 +273,7 @@ "HPND-Kevlin-Henney", "HPND-MIT-disclaimer", "HPND-Markus-Kuhn", + "HPND-Netrek", "HPND-Pbmplus", "HPND-UC", "HPND-UC-export-US", @@ -403,7 +407,6 @@ "NTP", "NTP-0", "Naumen", - "Net-SNMP", "NetCDF", "Newsletr", "Nokia", @@ -485,6 +488,7 @@ "RSCPL", "Rdisc", "Ruby", + "Ruby-pty", "SAX-PD", "SAX-PD-2.0", "SCEA", @@ -541,6 +545,7 @@ "UMich-Merit", "UPL-1.0", "URT-RLE", + "Ubuntu-font-1.0", "Unicode-3.0", "Unicode-DFS-2015", "Unicode-DFS-2016", @@ -559,6 +564,7 @@ "Wsuipa", "X11", "X11-distribute-modifications-variant", + "X11-swapped", "XFree86-1.1", "XSkat", "Xdebug-1.03", diff --git a/node_modules/spdx-license-ids/package.json b/node_modules/spdx-license-ids/package.json index 5f5ed9554f257..7ab34aab6b8b1 100644 --- a/node_modules/spdx-license-ids/package.json +++ b/node_modules/spdx-license-ids/package.json @@ -1,6 +1,6 @@ { "name": "spdx-license-ids", - "version": "3.0.18", + "version": "3.0.20", "description": "A list of SPDX license identifiers", "repository": "jslicense/spdx-license-ids", "author": "Shinnosuke Watanabe (https://github.com/shinnn)", diff --git a/package-lock.json b/package-lock.json 
index 55d0200857238..bffde21c6c9db 100644 --- a/package-lock.json +++ b/package-lock.json @@ -14074,9 +14074,9 @@ } }, "node_modules/spdx-license-ids": { - "version": "3.0.18", - "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.18.tgz", - "integrity": "sha512-xxRs31BqRYHwiMzudOrpSiHtZ8i/GeionCBDSilhYRj+9gIcI8wCZTlXZKu9vZIVqViP3dcp9qE5G6AlIaD+TQ==", + "version": "3.0.20", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.20.tgz", + "integrity": "sha512-jg25NiDV/1fLtSgEgyvVyDunvaNHbuwF9lfNV17gSmPFAlYzdfNBlLtLzXTevwkPj7DhGbmN9VnmJIgLnhvaBw==", "inBundle": true, "license": "CC0-1.0" }, From fa061b41847c248e747dbbaa2bc4e14b28c4ac9a Mon Sep 17 00:00:00 2001 From: Gar Date: Wed, 20 Nov 2024 09:49:31 -0800 Subject: [PATCH 14/16] deps: sigstore@3.0.0 --- DEPENDENCIES.md | 10 +- mock-registry/package.json | 2 +- node_modules/.gitignore | 18 - node_modules/@sigstore/bundle/LICENSE | 202 ------ node_modules/@sigstore/bundle/dist/build.js | 101 --- node_modules/@sigstore/bundle/dist/bundle.js | 24 - node_modules/@sigstore/bundle/dist/error.js | 25 - node_modules/@sigstore/bundle/dist/index.js | 43 -- .../@sigstore/bundle/dist/serialized.js | 49 -- node_modules/@sigstore/bundle/dist/utility.js | 2 - .../@sigstore/bundle/dist/validate.js | 199 ------ node_modules/@sigstore/bundle/package.json | 35 -- node_modules/@sigstore/core/LICENSE | 202 ------ .../@sigstore/core/dist/asn1/error.js | 24 - .../@sigstore/core/dist/asn1/index.js | 20 - .../@sigstore/core/dist/asn1/length.js | 63 -- node_modules/@sigstore/core/dist/asn1/obj.js | 152 ----- .../@sigstore/core/dist/asn1/parse.js | 125 ---- node_modules/@sigstore/core/dist/asn1/tag.js | 86 --- node_modules/@sigstore/core/dist/crypto.js | 71 --- node_modules/@sigstore/core/dist/dsse.js | 31 - node_modules/@sigstore/core/dist/encoding.js | 28 - node_modules/@sigstore/core/dist/index.js | 56 -- node_modules/@sigstore/core/dist/json.js | 61 -- node_modules/@sigstore/core/dist/oid.js | 14 - node_modules/@sigstore/core/dist/pem.js | 44 -- .../@sigstore/core/dist/rfc3161/error.js | 21 - .../@sigstore/core/dist/rfc3161/index.js | 20 - .../@sigstore/core/dist/rfc3161/timestamp.js | 201 ------ .../@sigstore/core/dist/rfc3161/tstinfo.js | 61 -- node_modules/@sigstore/core/dist/stream.js | 115 ---- node_modules/@sigstore/core/dist/x509/cert.js | 226 ------- node_modules/@sigstore/core/dist/x509/ext.js | 145 ----- .../@sigstore/core/dist/x509/index.js | 23 - node_modules/@sigstore/core/dist/x509/sct.js | 141 ----- node_modules/@sigstore/core/package.json | 31 - node_modules/@sigstore/sign/LICENSE | 202 ------ .../@sigstore/sign/dist/bundler/base.js | 50 -- .../@sigstore/sign/dist/bundler/bundle.js | 71 --- .../@sigstore/sign/dist/bundler/dsse.js | 46 -- .../@sigstore/sign/dist/bundler/index.js | 7 - .../@sigstore/sign/dist/bundler/message.js | 30 - node_modules/@sigstore/sign/dist/error.js | 39 -- .../@sigstore/sign/dist/external/error.js | 26 - .../@sigstore/sign/dist/external/fetch.js | 99 --- .../@sigstore/sign/dist/external/fulcio.js | 41 -- .../@sigstore/sign/dist/external/rekor.js | 80 --- .../@sigstore/sign/dist/external/tsa.js | 38 -- .../@sigstore/sign/dist/identity/ci.js | 73 --- .../@sigstore/sign/dist/identity/index.js | 20 - .../@sigstore/sign/dist/identity/provider.js | 2 - node_modules/@sigstore/sign/dist/index.js | 17 - .../@sigstore/sign/dist/signer/fulcio/ca.js | 60 -- .../sign/dist/signer/fulcio/ephemeral.js | 45 -- .../sign/dist/signer/fulcio/index.js | 87 --- 
.../@sigstore/sign/dist/signer/index.js | 22 - .../@sigstore/sign/dist/signer/signer.js | 17 - .../@sigstore/sign/dist/types/fetch.js | 2 - .../@sigstore/sign/dist/util/index.js | 49 -- node_modules/@sigstore/sign/dist/util/oidc.js | 31 - node_modules/@sigstore/sign/dist/util/ua.js | 33 - .../@sigstore/sign/dist/witness/index.js | 24 - .../sign/dist/witness/tlog/client.js | 61 -- .../@sigstore/sign/dist/witness/tlog/entry.js | 136 ---- .../@sigstore/sign/dist/witness/tlog/index.js | 82 --- .../@sigstore/sign/dist/witness/tsa/client.js | 43 -- .../@sigstore/sign/dist/witness/tsa/index.js | 44 -- .../@sigstore/sign/dist/witness/witness.js | 2 - .../node_modules/@npmcli/agent/lib/agents.js | 206 ------- .../node_modules/@npmcli/agent/lib/dns.js | 53 -- .../node_modules/@npmcli/agent/lib/errors.js | 61 -- .../node_modules/@npmcli/agent/lib/index.js | 56 -- .../node_modules/@npmcli/agent/lib/options.js | 86 --- .../node_modules/@npmcli/agent/lib/proxy.js | 88 --- .../node_modules/@npmcli/agent/package.json | 60 -- .../sign/node_modules/@npmcli/fs/LICENSE.md | 20 - .../@npmcli/fs/lib/common/get-options.js | 20 - .../@npmcli/fs/lib/common/node.js | 9 - .../node_modules/@npmcli/fs/lib/cp/LICENSE | 15 - .../node_modules/@npmcli/fs/lib/cp/errors.js | 129 ---- .../node_modules/@npmcli/fs/lib/cp/index.js | 22 - .../@npmcli/fs/lib/cp/polyfill.js | 428 ------------- .../sign/node_modules/@npmcli/fs/lib/index.js | 13 - .../node_modules/@npmcli/fs/lib/move-file.js | 78 --- .../@npmcli/fs/lib/readdir-scoped.js | 20 - .../@npmcli/fs/lib/with-temp-dir.js | 39 -- .../sign/node_modules/@npmcli/fs/package.json | 52 -- .../sign/node_modules/cacache/LICENSE.md | 16 - .../node_modules/cacache/lib/content/path.js | 29 - .../node_modules/cacache/lib/content/read.js | 165 ----- .../node_modules/cacache/lib/content/rm.js | 18 - .../node_modules/cacache/lib/content/write.js | 206 ------- .../node_modules/cacache/lib/entry-index.js | 336 ---------- .../sign/node_modules/cacache/lib/get.js | 170 ----- .../sign/node_modules/cacache/lib/index.js | 42 -- .../node_modules/cacache/lib/memoization.js | 72 --- .../sign/node_modules/cacache/lib/put.js | 80 --- .../sign/node_modules/cacache/lib/rm.js | 31 - .../node_modules/cacache/lib/util/glob.js | 7 - .../cacache/lib/util/hash-to-segments.js | 7 - .../sign/node_modules/cacache/lib/util/tmp.js | 26 - .../sign/node_modules/cacache/lib/verify.js | 257 -------- .../sign/node_modules/cacache/package.json | 82 --- .../node_modules/make-fetch-happen/LICENSE | 16 - .../make-fetch-happen/lib/cache/entry.js | 471 -------------- .../make-fetch-happen/lib/cache/errors.js | 11 - .../make-fetch-happen/lib/cache/index.js | 49 -- .../make-fetch-happen/lib/cache/key.js | 17 - .../make-fetch-happen/lib/cache/policy.js | 161 ----- .../make-fetch-happen/lib/fetch.js | 118 ---- .../make-fetch-happen/lib/index.js | 41 -- .../make-fetch-happen/lib/options.js | 54 -- .../make-fetch-happen/lib/pipeline.js | 41 -- .../make-fetch-happen/lib/remote.js | 131 ---- .../make-fetch-happen/package.json | 75 --- .../sign/node_modules/minipass-fetch/LICENSE | 28 - .../minipass-fetch/lib/abort-error.js | 17 - .../node_modules/minipass-fetch/lib/blob.js | 97 --- .../node_modules/minipass-fetch/lib/body.js | 350 ----------- .../minipass-fetch/lib/fetch-error.js | 32 - .../minipass-fetch/lib/headers.js | 267 -------- .../node_modules/minipass-fetch/lib/index.js | 377 ------------ .../minipass-fetch/lib/request.js | 282 --------- .../minipass-fetch/lib/response.js | 90 --- .../node_modules/minipass-fetch/package.json 
| 69 --- .../sign/node_modules/proc-log/LICENSE | 15 - .../sign/node_modules/proc-log/lib/index.js | 153 ----- .../sign/node_modules/proc-log/package.json | 45 -- .../sign/node_modules/ssri/LICENSE.md | 16 - .../sign/node_modules/ssri/lib/index.js | 580 ------------------ .../sign/node_modules/ssri/package.json | 65 -- .../sign/node_modules/unique-filename/LICENSE | 5 - .../node_modules/unique-filename/lib/index.js | 7 - .../node_modules/unique-filename/package.json | 51 -- .../sign/node_modules/unique-slug/LICENSE | 15 - .../node_modules/unique-slug/lib/index.js | 11 - .../node_modules/unique-slug/package.json | 47 -- node_modules/@sigstore/sign/package.json | 46 -- .../@sigstore/verify/dist/bundle/dsse.js | 43 -- .../@sigstore/verify/dist/bundle/index.js | 58 -- .../@sigstore/verify/dist/bundle/message.js | 36 -- node_modules/@sigstore/verify/dist/error.js | 32 - node_modules/@sigstore/verify/dist/index.js | 28 - .../@sigstore/verify/dist/key/certificate.js | 205 ------- .../@sigstore/verify/dist/key/index.js | 72 --- node_modules/@sigstore/verify/dist/key/sct.js | 79 --- node_modules/@sigstore/verify/dist/policy.js | 25 - .../@sigstore/verify/dist/shared.types.js | 2 - .../verify/dist/timestamp/checkpoint.js | 158 ----- .../@sigstore/verify/dist/timestamp/index.js | 47 -- .../@sigstore/verify/dist/timestamp/merkle.js | 105 ---- .../@sigstore/verify/dist/timestamp/set.js | 61 -- .../@sigstore/verify/dist/timestamp/tsa.js | 74 --- .../@sigstore/verify/dist/tlog/dsse.js | 58 -- .../verify/dist/tlog/hashedrekord.js | 52 -- .../@sigstore/verify/dist/tlog/index.js | 48 -- .../@sigstore/verify/dist/tlog/intoto.js | 63 -- .../@sigstore/verify/dist/trust/filter.js | 24 - .../@sigstore/verify/dist/trust/index.js | 84 --- .../verify/dist/trust/trust.types.js | 2 - .../@sigstore/verify/dist/verifier.js | 141 ----- node_modules/@sigstore/verify/package.json | 36 -- .../minizlib/dist/commonjs/constants.js | 123 ++++ node_modules/minizlib/dist/commonjs/index.js | 352 +++++++++++ .../minizlib/dist/commonjs/package.json | 3 + node_modules/minizlib/dist/esm/constants.js | 117 ++++ node_modules/minizlib/dist/esm/index.js | 333 ++++++++++ node_modules/minizlib/dist/esm/package.json | 3 + node_modules/sigstore/LICENSE | 202 ------ node_modules/sigstore/dist/config.js | 116 ---- node_modules/sigstore/dist/index.js | 34 - node_modules/sigstore/dist/sigstore.js | 103 ---- node_modules/sigstore/package.json | 47 -- package-lock.json | 177 ++++-- workspaces/libnpmpublish/lib/publish.js | 2 +- workspaces/libnpmpublish/package.json | 2 +- 176 files changed, 1065 insertions(+), 13181 deletions(-) delete mode 100644 node_modules/@sigstore/bundle/LICENSE delete mode 100644 node_modules/@sigstore/bundle/dist/build.js delete mode 100644 node_modules/@sigstore/bundle/dist/bundle.js delete mode 100644 node_modules/@sigstore/bundle/dist/error.js delete mode 100644 node_modules/@sigstore/bundle/dist/index.js delete mode 100644 node_modules/@sigstore/bundle/dist/serialized.js delete mode 100644 node_modules/@sigstore/bundle/dist/utility.js delete mode 100644 node_modules/@sigstore/bundle/dist/validate.js delete mode 100644 node_modules/@sigstore/bundle/package.json delete mode 100644 node_modules/@sigstore/core/LICENSE delete mode 100644 node_modules/@sigstore/core/dist/asn1/error.js delete mode 100644 node_modules/@sigstore/core/dist/asn1/index.js delete mode 100644 node_modules/@sigstore/core/dist/asn1/length.js delete mode 100644 node_modules/@sigstore/core/dist/asn1/obj.js delete mode 100644 
node_modules/@sigstore/core/dist/asn1/parse.js delete mode 100644 node_modules/@sigstore/core/dist/asn1/tag.js delete mode 100644 node_modules/@sigstore/core/dist/crypto.js delete mode 100644 node_modules/@sigstore/core/dist/dsse.js delete mode 100644 node_modules/@sigstore/core/dist/encoding.js delete mode 100644 node_modules/@sigstore/core/dist/index.js delete mode 100644 node_modules/@sigstore/core/dist/json.js delete mode 100644 node_modules/@sigstore/core/dist/oid.js delete mode 100644 node_modules/@sigstore/core/dist/pem.js delete mode 100644 node_modules/@sigstore/core/dist/rfc3161/error.js delete mode 100644 node_modules/@sigstore/core/dist/rfc3161/index.js delete mode 100644 node_modules/@sigstore/core/dist/rfc3161/timestamp.js delete mode 100644 node_modules/@sigstore/core/dist/rfc3161/tstinfo.js delete mode 100644 node_modules/@sigstore/core/dist/stream.js delete mode 100644 node_modules/@sigstore/core/dist/x509/cert.js delete mode 100644 node_modules/@sigstore/core/dist/x509/ext.js delete mode 100644 node_modules/@sigstore/core/dist/x509/index.js delete mode 100644 node_modules/@sigstore/core/dist/x509/sct.js delete mode 100644 node_modules/@sigstore/core/package.json delete mode 100644 node_modules/@sigstore/sign/LICENSE delete mode 100644 node_modules/@sigstore/sign/dist/bundler/base.js delete mode 100644 node_modules/@sigstore/sign/dist/bundler/bundle.js delete mode 100644 node_modules/@sigstore/sign/dist/bundler/dsse.js delete mode 100644 node_modules/@sigstore/sign/dist/bundler/index.js delete mode 100644 node_modules/@sigstore/sign/dist/bundler/message.js delete mode 100644 node_modules/@sigstore/sign/dist/error.js delete mode 100644 node_modules/@sigstore/sign/dist/external/error.js delete mode 100644 node_modules/@sigstore/sign/dist/external/fetch.js delete mode 100644 node_modules/@sigstore/sign/dist/external/fulcio.js delete mode 100644 node_modules/@sigstore/sign/dist/external/rekor.js delete mode 100644 node_modules/@sigstore/sign/dist/external/tsa.js delete mode 100644 node_modules/@sigstore/sign/dist/identity/ci.js delete mode 100644 node_modules/@sigstore/sign/dist/identity/index.js delete mode 100644 node_modules/@sigstore/sign/dist/identity/provider.js delete mode 100644 node_modules/@sigstore/sign/dist/index.js delete mode 100644 node_modules/@sigstore/sign/dist/signer/fulcio/ca.js delete mode 100644 node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js delete mode 100644 node_modules/@sigstore/sign/dist/signer/fulcio/index.js delete mode 100644 node_modules/@sigstore/sign/dist/signer/index.js delete mode 100644 node_modules/@sigstore/sign/dist/signer/signer.js delete mode 100644 node_modules/@sigstore/sign/dist/types/fetch.js delete mode 100644 node_modules/@sigstore/sign/dist/util/index.js delete mode 100644 node_modules/@sigstore/sign/dist/util/oidc.js delete mode 100644 node_modules/@sigstore/sign/dist/util/ua.js delete mode 100644 node_modules/@sigstore/sign/dist/witness/index.js delete mode 100644 node_modules/@sigstore/sign/dist/witness/tlog/client.js delete mode 100644 node_modules/@sigstore/sign/dist/witness/tlog/entry.js delete mode 100644 node_modules/@sigstore/sign/dist/witness/tlog/index.js delete mode 100644 node_modules/@sigstore/sign/dist/witness/tsa/client.js delete mode 100644 node_modules/@sigstore/sign/dist/witness/tsa/index.js delete mode 100644 node_modules/@sigstore/sign/dist/witness/witness.js delete mode 100644 node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/agents.js delete mode 100644 
node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/dns.js delete mode 100644 node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/errors.js delete mode 100644 node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/index.js delete mode 100644 node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/options.js delete mode 100644 node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/proxy.js delete mode 100644 node_modules/@sigstore/sign/node_modules/@npmcli/agent/package.json delete mode 100644 node_modules/@sigstore/sign/node_modules/@npmcli/fs/LICENSE.md delete mode 100644 node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/common/get-options.js delete mode 100644 node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/common/node.js delete mode 100644 node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/LICENSE delete mode 100644 node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/errors.js delete mode 100644 node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/index.js delete mode 100644 node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/polyfill.js delete mode 100644 node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/index.js delete mode 100644 node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/move-file.js delete mode 100644 node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/readdir-scoped.js delete mode 100644 node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/with-temp-dir.js delete mode 100644 node_modules/@sigstore/sign/node_modules/@npmcli/fs/package.json delete mode 100644 node_modules/@sigstore/sign/node_modules/cacache/LICENSE.md delete mode 100644 node_modules/@sigstore/sign/node_modules/cacache/lib/content/path.js delete mode 100644 node_modules/@sigstore/sign/node_modules/cacache/lib/content/read.js delete mode 100644 node_modules/@sigstore/sign/node_modules/cacache/lib/content/rm.js delete mode 100644 node_modules/@sigstore/sign/node_modules/cacache/lib/content/write.js delete mode 100644 node_modules/@sigstore/sign/node_modules/cacache/lib/entry-index.js delete mode 100644 node_modules/@sigstore/sign/node_modules/cacache/lib/get.js delete mode 100644 node_modules/@sigstore/sign/node_modules/cacache/lib/index.js delete mode 100644 node_modules/@sigstore/sign/node_modules/cacache/lib/memoization.js delete mode 100644 node_modules/@sigstore/sign/node_modules/cacache/lib/put.js delete mode 100644 node_modules/@sigstore/sign/node_modules/cacache/lib/rm.js delete mode 100644 node_modules/@sigstore/sign/node_modules/cacache/lib/util/glob.js delete mode 100644 node_modules/@sigstore/sign/node_modules/cacache/lib/util/hash-to-segments.js delete mode 100644 node_modules/@sigstore/sign/node_modules/cacache/lib/util/tmp.js delete mode 100644 node_modules/@sigstore/sign/node_modules/cacache/lib/verify.js delete mode 100644 node_modules/@sigstore/sign/node_modules/cacache/package.json delete mode 100644 node_modules/@sigstore/sign/node_modules/make-fetch-happen/LICENSE delete mode 100644 node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/entry.js delete mode 100644 node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/errors.js delete mode 100644 node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/index.js delete mode 100644 node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/key.js delete mode 100644 node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/policy.js delete mode 100644 
node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/fetch.js delete mode 100644 node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/index.js delete mode 100644 node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/options.js delete mode 100644 node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/pipeline.js delete mode 100644 node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/remote.js delete mode 100644 node_modules/@sigstore/sign/node_modules/make-fetch-happen/package.json delete mode 100644 node_modules/@sigstore/sign/node_modules/minipass-fetch/LICENSE delete mode 100644 node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/abort-error.js delete mode 100644 node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/blob.js delete mode 100644 node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/body.js delete mode 100644 node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/fetch-error.js delete mode 100644 node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/headers.js delete mode 100644 node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/index.js delete mode 100644 node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/request.js delete mode 100644 node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/response.js delete mode 100644 node_modules/@sigstore/sign/node_modules/minipass-fetch/package.json delete mode 100644 node_modules/@sigstore/sign/node_modules/proc-log/LICENSE delete mode 100644 node_modules/@sigstore/sign/node_modules/proc-log/lib/index.js delete mode 100644 node_modules/@sigstore/sign/node_modules/proc-log/package.json delete mode 100644 node_modules/@sigstore/sign/node_modules/ssri/LICENSE.md delete mode 100644 node_modules/@sigstore/sign/node_modules/ssri/lib/index.js delete mode 100644 node_modules/@sigstore/sign/node_modules/ssri/package.json delete mode 100644 node_modules/@sigstore/sign/node_modules/unique-filename/LICENSE delete mode 100644 node_modules/@sigstore/sign/node_modules/unique-filename/lib/index.js delete mode 100644 node_modules/@sigstore/sign/node_modules/unique-filename/package.json delete mode 100644 node_modules/@sigstore/sign/node_modules/unique-slug/LICENSE delete mode 100644 node_modules/@sigstore/sign/node_modules/unique-slug/lib/index.js delete mode 100644 node_modules/@sigstore/sign/node_modules/unique-slug/package.json delete mode 100644 node_modules/@sigstore/sign/package.json delete mode 100644 node_modules/@sigstore/verify/dist/bundle/dsse.js delete mode 100644 node_modules/@sigstore/verify/dist/bundle/index.js delete mode 100644 node_modules/@sigstore/verify/dist/bundle/message.js delete mode 100644 node_modules/@sigstore/verify/dist/error.js delete mode 100644 node_modules/@sigstore/verify/dist/index.js delete mode 100644 node_modules/@sigstore/verify/dist/key/certificate.js delete mode 100644 node_modules/@sigstore/verify/dist/key/index.js delete mode 100644 node_modules/@sigstore/verify/dist/key/sct.js delete mode 100644 node_modules/@sigstore/verify/dist/policy.js delete mode 100644 node_modules/@sigstore/verify/dist/shared.types.js delete mode 100644 node_modules/@sigstore/verify/dist/timestamp/checkpoint.js delete mode 100644 node_modules/@sigstore/verify/dist/timestamp/index.js delete mode 100644 node_modules/@sigstore/verify/dist/timestamp/merkle.js delete mode 100644 node_modules/@sigstore/verify/dist/timestamp/set.js delete mode 100644 node_modules/@sigstore/verify/dist/timestamp/tsa.js delete mode 100644 
node_modules/@sigstore/verify/dist/tlog/dsse.js delete mode 100644 node_modules/@sigstore/verify/dist/tlog/hashedrekord.js delete mode 100644 node_modules/@sigstore/verify/dist/tlog/index.js delete mode 100644 node_modules/@sigstore/verify/dist/tlog/intoto.js delete mode 100644 node_modules/@sigstore/verify/dist/trust/filter.js delete mode 100644 node_modules/@sigstore/verify/dist/trust/index.js delete mode 100644 node_modules/@sigstore/verify/dist/trust/trust.types.js delete mode 100644 node_modules/@sigstore/verify/dist/verifier.js delete mode 100644 node_modules/@sigstore/verify/package.json create mode 100644 node_modules/minizlib/dist/commonjs/constants.js create mode 100644 node_modules/minizlib/dist/commonjs/index.js create mode 100644 node_modules/minizlib/dist/commonjs/package.json create mode 100644 node_modules/minizlib/dist/esm/constants.js create mode 100644 node_modules/minizlib/dist/esm/index.js create mode 100644 node_modules/minizlib/dist/esm/package.json delete mode 100644 node_modules/sigstore/LICENSE delete mode 100644 node_modules/sigstore/dist/config.js delete mode 100644 node_modules/sigstore/dist/index.js delete mode 100644 node_modules/sigstore/dist/sigstore.js delete mode 100644 node_modules/sigstore/package.json diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md index cb51908fc2283..2c26f44657369 100644 --- a/DEPENDENCIES.md +++ b/DEPENDENCIES.md @@ -321,6 +321,7 @@ graph LR; isaacs-cliui-->strip-ansi; isaacs-cliui-->wrap-ansi-cjs; isaacs-cliui-->wrap-ansi; + isaacs-fs-minipass-->minipass; jackspeak-->isaacs-cliui["@isaacs/cliui"]; jackspeak-->pkgjs-parseargs["@pkgjs/parseargs"]; libnpmaccess-->nock; @@ -766,6 +767,7 @@ graph LR; strip-ansi-->ansi-regex; tar-->chownr; tar-->fs-minipass; + tar-->isaacs-fs-minipass["@isaacs/fs-minipass"]; tar-->minipass; tar-->minizlib; tar-->mkdirp; @@ -799,9 +801,9 @@ packages higher up the chain. 
- @npmcli/arborist - @npmcli/metavuln-calculator - pacote, @npmcli/config, libnpmversion - - @npmcli/run-script, @npmcli/map-workspaces, libnpmhook, libnpmorg, libnpmsearch, libnpmteam, init-package-json, npm-profile + - @npmcli/map-workspaces, @npmcli/run-script, libnpmhook, libnpmorg, libnpmsearch, libnpmteam, init-package-json, npm-profile - @npmcli/package-json, npm-registry-fetch - @npmcli/git, make-fetch-happen - - npm-pick-manifest, @npmcli/installed-package-contents, cacache, promzard - - @npmcli/docs, npm-package-arg, npm-install-checks, npm-bundled, normalize-package-data, @npmcli/fs, unique-filename, npm-packlist, @npmcli/mock-globals, bin-links, nopt, parse-conflict-json, read-package-json-fast, read - - @npmcli/eslint-config, @npmcli/template-oss, ignore-walk, semver, hosted-git-info, proc-log, validate-npm-package-name, @npmcli/promise-spawn, ini, npm-normalize-package-bin, json-parse-even-better-errors, @npmcli/node-gyp, fs-minipass, ssri, unique-slug, @npmcli/redact, @npmcli/agent, minipass-fetch, @npmcli/name-from-folder, @npmcli/query, cmd-shim, read-cmd-shim, write-file-atomic, abbrev, proggy, minify-registry-metadata, mute-stream, npm-audit-report, npm-user-validate + - @npmcli/installed-package-contents, npm-pick-manifest, cacache, promzard + - @npmcli/docs, @npmcli/fs, npm-bundled, npm-install-checks, npm-package-arg, normalize-package-data, unique-filename, npm-packlist, bin-links, nopt, parse-conflict-json, read-package-json-fast, @npmcli/mock-globals, read + - @npmcli/eslint-config, @npmcli/template-oss, ignore-walk, semver, npm-normalize-package-bin, @npmcli/name-from-folder, @npmcli/promise-spawn, ini, hosted-git-info, proc-log, validate-npm-package-name, json-parse-even-better-errors, fs-minipass, ssri, unique-slug, @npmcli/node-gyp, @npmcli/redact, @npmcli/agent, minipass-fetch, @npmcli/query, cmd-shim, read-cmd-shim, write-file-atomic, abbrev, proggy, minify-registry-metadata, mute-stream, npm-audit-report, npm-user-validate diff --git a/mock-registry/package.json b/mock-registry/package.json index 5620f0772f5f5..f4c1687e5714d 100644 --- a/mock-registry/package.json +++ b/mock-registry/package.json @@ -46,7 +46,7 @@ ] }, "devDependencies": { - "@npmcli/arborist": "^7.1.0", + "@npmcli/arborist": "^7.5.4", "@npmcli/eslint-config": "^5.0.1", "@npmcli/template-oss": "4.23.3", "json-stringify-safe": "^5.0.1", diff --git a/node_modules/.gitignore b/node_modules/.gitignore index 6f2a5e2c034e8..66bb3a00dac78 100644 --- a/node_modules/.gitignore +++ b/node_modules/.gitignore @@ -49,25 +49,8 @@ !/@pkgjs/parseargs !/@sigstore/ /@sigstore/* -!/@sigstore/bundle -!/@sigstore/core !/@sigstore/protobuf-specs -!/@sigstore/sign -!/@sigstore/sign/node_modules/ -/@sigstore/sign/node_modules/* -!/@sigstore/sign/node_modules/@npmcli/ -/@sigstore/sign/node_modules/@npmcli/* -!/@sigstore/sign/node_modules/@npmcli/agent -!/@sigstore/sign/node_modules/@npmcli/fs -!/@sigstore/sign/node_modules/cacache -!/@sigstore/sign/node_modules/make-fetch-happen -!/@sigstore/sign/node_modules/minipass-fetch -!/@sigstore/sign/node_modules/proc-log -!/@sigstore/sign/node_modules/ssri -!/@sigstore/sign/node_modules/unique-filename -!/@sigstore/sign/node_modules/unique-slug !/@sigstore/tuf -!/@sigstore/verify !/@tufjs/ /@tufjs/* !/@tufjs/canonical-json @@ -250,7 +233,6 @@ !/shebang-command !/shebang-regex !/signal-exit -!/sigstore !/smart-buffer !/socks-proxy-agent !/socks diff --git a/node_modules/@sigstore/bundle/LICENSE b/node_modules/@sigstore/bundle/LICENSE deleted file mode 100644 index 
e9e7c1679a09d..0000000000000 --- a/node_modules/@sigstore/bundle/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. 
Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2023 The Sigstore Authors - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. 
- You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/node_modules/@sigstore/bundle/dist/build.js b/node_modules/@sigstore/bundle/dist/build.js deleted file mode 100644 index 65c71b100ad58..0000000000000 --- a/node_modules/@sigstore/bundle/dist/build.js +++ /dev/null @@ -1,101 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.toDSSEBundle = exports.toMessageSignatureBundle = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const protobuf_specs_1 = require("@sigstore/protobuf-specs"); -const bundle_1 = require("./bundle"); -// Message signature bundle - $case: 'messageSignature' -function toMessageSignatureBundle(options) { - return { - mediaType: options.singleCertificate - ? bundle_1.BUNDLE_V03_MEDIA_TYPE - : bundle_1.BUNDLE_V02_MEDIA_TYPE, - content: { - $case: 'messageSignature', - messageSignature: { - messageDigest: { - algorithm: protobuf_specs_1.HashAlgorithm.SHA2_256, - digest: options.digest, - }, - signature: options.signature, - }, - }, - verificationMaterial: toVerificationMaterial(options), - }; -} -exports.toMessageSignatureBundle = toMessageSignatureBundle; -// DSSE envelope bundle - $case: 'dsseEnvelope' -function toDSSEBundle(options) { - return { - mediaType: options.singleCertificate - ? 
bundle_1.BUNDLE_V03_MEDIA_TYPE - : bundle_1.BUNDLE_V02_MEDIA_TYPE, - content: { - $case: 'dsseEnvelope', - dsseEnvelope: toEnvelope(options), - }, - verificationMaterial: toVerificationMaterial(options), - }; -} -exports.toDSSEBundle = toDSSEBundle; -function toEnvelope(options) { - return { - payloadType: options.artifactType, - payload: options.artifact, - signatures: [toSignature(options)], - }; -} -function toSignature(options) { - return { - keyid: options.keyHint || '', - sig: options.signature, - }; -} -// Verification material -function toVerificationMaterial(options) { - return { - content: toKeyContent(options), - tlogEntries: [], - timestampVerificationData: { rfc3161Timestamps: [] }, - }; -} -function toKeyContent(options) { - if (options.certificate) { - if (options.singleCertificate) { - return { - $case: 'certificate', - certificate: { rawBytes: options.certificate }, - }; - } - else { - return { - $case: 'x509CertificateChain', - x509CertificateChain: { - certificates: [{ rawBytes: options.certificate }], - }, - }; - } - } - else { - return { - $case: 'publicKey', - publicKey: { - hint: options.keyHint || '', - }, - }; - } -} diff --git a/node_modules/@sigstore/bundle/dist/bundle.js b/node_modules/@sigstore/bundle/dist/bundle.js deleted file mode 100644 index dbd35df2ca2bb..0000000000000 --- a/node_modules/@sigstore/bundle/dist/bundle.js +++ /dev/null @@ -1,24 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.isBundleWithDsseEnvelope = exports.isBundleWithMessageSignature = exports.isBundleWithPublicKey = exports.isBundleWithCertificateChain = exports.BUNDLE_V03_MEDIA_TYPE = exports.BUNDLE_V03_LEGACY_MEDIA_TYPE = exports.BUNDLE_V02_MEDIA_TYPE = exports.BUNDLE_V01_MEDIA_TYPE = void 0; -exports.BUNDLE_V01_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.1'; -exports.BUNDLE_V02_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.2'; -exports.BUNDLE_V03_LEGACY_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.3'; -exports.BUNDLE_V03_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle.v0.3+json'; -// Type guards for bundle variants. -function isBundleWithCertificateChain(b) { - return b.verificationMaterial.content.$case === 'x509CertificateChain'; -} -exports.isBundleWithCertificateChain = isBundleWithCertificateChain; -function isBundleWithPublicKey(b) { - return b.verificationMaterial.content.$case === 'publicKey'; -} -exports.isBundleWithPublicKey = isBundleWithPublicKey; -function isBundleWithMessageSignature(b) { - return b.content.$case === 'messageSignature'; -} -exports.isBundleWithMessageSignature = isBundleWithMessageSignature; -function isBundleWithDsseEnvelope(b) { - return b.content.$case === 'dsseEnvelope'; -} -exports.isBundleWithDsseEnvelope = isBundleWithDsseEnvelope; diff --git a/node_modules/@sigstore/bundle/dist/error.js b/node_modules/@sigstore/bundle/dist/error.js deleted file mode 100644 index f84295323b812..0000000000000 --- a/node_modules/@sigstore/bundle/dist/error.js +++ /dev/null @@ -1,25 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.ValidationError = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -class ValidationError extends Error { - constructor(message, fields) { - super(message); - this.fields = fields; - } -} -exports.ValidationError = ValidationError; diff --git a/node_modules/@sigstore/bundle/dist/index.js b/node_modules/@sigstore/bundle/dist/index.js deleted file mode 100644 index 1b012acad4d85..0000000000000 --- a/node_modules/@sigstore/bundle/dist/index.js +++ /dev/null @@ -1,43 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.isBundleV01 = exports.assertBundleV02 = exports.assertBundleV01 = exports.assertBundleLatest = exports.assertBundle = exports.envelopeToJSON = exports.envelopeFromJSON = exports.bundleToJSON = exports.bundleFromJSON = exports.ValidationError = exports.isBundleWithPublicKey = exports.isBundleWithMessageSignature = exports.isBundleWithDsseEnvelope = exports.isBundleWithCertificateChain = exports.BUNDLE_V03_MEDIA_TYPE = exports.BUNDLE_V03_LEGACY_MEDIA_TYPE = exports.BUNDLE_V02_MEDIA_TYPE = exports.BUNDLE_V01_MEDIA_TYPE = exports.toMessageSignatureBundle = exports.toDSSEBundle = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -var build_1 = require("./build"); -Object.defineProperty(exports, "toDSSEBundle", { enumerable: true, get: function () { return build_1.toDSSEBundle; } }); -Object.defineProperty(exports, "toMessageSignatureBundle", { enumerable: true, get: function () { return build_1.toMessageSignatureBundle; } }); -var bundle_1 = require("./bundle"); -Object.defineProperty(exports, "BUNDLE_V01_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V01_MEDIA_TYPE; } }); -Object.defineProperty(exports, "BUNDLE_V02_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V02_MEDIA_TYPE; } }); -Object.defineProperty(exports, "BUNDLE_V03_LEGACY_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V03_LEGACY_MEDIA_TYPE; } }); -Object.defineProperty(exports, "BUNDLE_V03_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V03_MEDIA_TYPE; } }); -Object.defineProperty(exports, "isBundleWithCertificateChain", { enumerable: true, get: function () { return bundle_1.isBundleWithCertificateChain; } }); -Object.defineProperty(exports, "isBundleWithDsseEnvelope", { enumerable: true, get: function () { return bundle_1.isBundleWithDsseEnvelope; } }); -Object.defineProperty(exports, "isBundleWithMessageSignature", { enumerable: true, get: function () { return bundle_1.isBundleWithMessageSignature; } }); -Object.defineProperty(exports, "isBundleWithPublicKey", { enumerable: true, get: function () { return bundle_1.isBundleWithPublicKey; } }); -var error_1 = require("./error"); -Object.defineProperty(exports, "ValidationError", { enumerable: true, get: function () { return error_1.ValidationError; } }); -var serialized_1 = require("./serialized"); -Object.defineProperty(exports, "bundleFromJSON", { enumerable: true, get: function () { return serialized_1.bundleFromJSON; } }); -Object.defineProperty(exports, "bundleToJSON", { enumerable: true, get: function () { return serialized_1.bundleToJSON; } }); -Object.defineProperty(exports, "envelopeFromJSON", { enumerable: true, get: function () { return serialized_1.envelopeFromJSON; } }); -Object.defineProperty(exports, "envelopeToJSON", { enumerable: true, get: function () { return serialized_1.envelopeToJSON; } }); -var validate_1 = require("./validate"); -Object.defineProperty(exports, "assertBundle", { enumerable: true, get: function () { return validate_1.assertBundle; } }); -Object.defineProperty(exports, "assertBundleLatest", { enumerable: true, get: function () { return validate_1.assertBundleLatest; } }); -Object.defineProperty(exports, "assertBundleV01", { enumerable: true, get: function () { return validate_1.assertBundleV01; } }); -Object.defineProperty(exports, "assertBundleV02", { enumerable: true, get: function () { return validate_1.assertBundleV02; } }); -Object.defineProperty(exports, "isBundleV01", { enumerable: true, get: function () { return validate_1.isBundleV01; } }); diff --git a/node_modules/@sigstore/bundle/dist/serialized.js b/node_modules/@sigstore/bundle/dist/serialized.js deleted file mode 100644 index be0d2a2d54d09..0000000000000 --- a/node_modules/@sigstore/bundle/dist/serialized.js +++ /dev/null @@ -1,49 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.envelopeToJSON = exports.envelopeFromJSON = exports.bundleToJSON = exports.bundleFromJSON = void 0; -/* -Copyright 2023 The Sigstore Authors. 
- -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const protobuf_specs_1 = require("@sigstore/protobuf-specs"); -const bundle_1 = require("./bundle"); -const validate_1 = require("./validate"); -const bundleFromJSON = (obj) => { - const bundle = protobuf_specs_1.Bundle.fromJSON(obj); - switch (bundle.mediaType) { - case bundle_1.BUNDLE_V01_MEDIA_TYPE: - (0, validate_1.assertBundleV01)(bundle); - break; - case bundle_1.BUNDLE_V02_MEDIA_TYPE: - (0, validate_1.assertBundleV02)(bundle); - break; - default: - (0, validate_1.assertBundleLatest)(bundle); - break; - } - return bundle; -}; -exports.bundleFromJSON = bundleFromJSON; -const bundleToJSON = (bundle) => { - return protobuf_specs_1.Bundle.toJSON(bundle); -}; -exports.bundleToJSON = bundleToJSON; -const envelopeFromJSON = (obj) => { - return protobuf_specs_1.Envelope.fromJSON(obj); -}; -exports.envelopeFromJSON = envelopeFromJSON; -const envelopeToJSON = (envelope) => { - return protobuf_specs_1.Envelope.toJSON(envelope); -}; -exports.envelopeToJSON = envelopeToJSON; diff --git a/node_modules/@sigstore/bundle/dist/utility.js b/node_modules/@sigstore/bundle/dist/utility.js deleted file mode 100644 index c8ad2e549bdc6..0000000000000 --- a/node_modules/@sigstore/bundle/dist/utility.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@sigstore/bundle/dist/validate.js b/node_modules/@sigstore/bundle/dist/validate.js deleted file mode 100644 index 67079cd1f680a..0000000000000 --- a/node_modules/@sigstore/bundle/dist/validate.js +++ /dev/null @@ -1,199 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.assertBundleLatest = exports.assertBundleV02 = exports.isBundleV01 = exports.assertBundleV01 = exports.assertBundle = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const error_1 = require("./error"); -// Performs basic validation of a Sigstore bundle to ensure that all required -// fields are populated. This is not a complete validation of the bundle, but -// rather a check that the bundle is in a valid state to be processed by the -// rest of the code. -function assertBundle(b) { - const invalidValues = validateBundleBase(b); - if (invalidValues.length > 0) { - throw new error_1.ValidationError('invalid bundle', invalidValues); - } -} -exports.assertBundle = assertBundle; -// Asserts that the given bundle conforms to the v0.1 bundle format. 
-function assertBundleV01(b) { - const invalidValues = []; - invalidValues.push(...validateBundleBase(b)); - invalidValues.push(...validateInclusionPromise(b)); - if (invalidValues.length > 0) { - throw new error_1.ValidationError('invalid v0.1 bundle', invalidValues); - } -} -exports.assertBundleV01 = assertBundleV01; -// Type guard to determine if Bundle is a v0.1 bundle. -function isBundleV01(b) { - try { - assertBundleV01(b); - return true; - } - catch (e) { - return false; - } -} -exports.isBundleV01 = isBundleV01; -// Asserts that the given bundle conforms to the v0.2 bundle format. -function assertBundleV02(b) { - const invalidValues = []; - invalidValues.push(...validateBundleBase(b)); - invalidValues.push(...validateInclusionProof(b)); - if (invalidValues.length > 0) { - throw new error_1.ValidationError('invalid v0.2 bundle', invalidValues); - } -} -exports.assertBundleV02 = assertBundleV02; -// Asserts that the given bundle conforms to the newest (0.3) bundle format. -function assertBundleLatest(b) { - const invalidValues = []; - invalidValues.push(...validateBundleBase(b)); - invalidValues.push(...validateInclusionProof(b)); - invalidValues.push(...validateNoCertificateChain(b)); - if (invalidValues.length > 0) { - throw new error_1.ValidationError('invalid bundle', invalidValues); - } -} -exports.assertBundleLatest = assertBundleLatest; -function validateBundleBase(b) { - const invalidValues = []; - // Media type validation - if (b.mediaType === undefined || - (!b.mediaType.match(/^application\/vnd\.dev\.sigstore\.bundle\+json;version=\d\.\d/) && - !b.mediaType.match(/^application\/vnd\.dev\.sigstore\.bundle\.v\d\.\d\+json/))) { - invalidValues.push('mediaType'); - } - // Content-related validation - if (b.content === undefined) { - invalidValues.push('content'); - } - else { - switch (b.content.$case) { - case 'messageSignature': - if (b.content.messageSignature.messageDigest === undefined) { - invalidValues.push('content.messageSignature.messageDigest'); - } - else { - if (b.content.messageSignature.messageDigest.digest.length === 0) { - invalidValues.push('content.messageSignature.messageDigest.digest'); - } - } - if (b.content.messageSignature.signature.length === 0) { - invalidValues.push('content.messageSignature.signature'); - } - break; - case 'dsseEnvelope': - if (b.content.dsseEnvelope.payload.length === 0) { - invalidValues.push('content.dsseEnvelope.payload'); - } - if (b.content.dsseEnvelope.signatures.length !== 1) { - invalidValues.push('content.dsseEnvelope.signatures'); - } - else { - if (b.content.dsseEnvelope.signatures[0].sig.length === 0) { - invalidValues.push('content.dsseEnvelope.signatures[0].sig'); - } - } - break; - } - } - // Verification material-related validation - if (b.verificationMaterial === undefined) { - invalidValues.push('verificationMaterial'); - } - else { - if (b.verificationMaterial.content === undefined) { - invalidValues.push('verificationMaterial.content'); - } - else { - switch (b.verificationMaterial.content.$case) { - case 'x509CertificateChain': - if (b.verificationMaterial.content.x509CertificateChain.certificates - .length === 0) { - invalidValues.push('verificationMaterial.content.x509CertificateChain.certificates'); - } - b.verificationMaterial.content.x509CertificateChain.certificates.forEach((cert, i) => { - if (cert.rawBytes.length === 0) { - invalidValues.push(`verificationMaterial.content.x509CertificateChain.certificates[${i}].rawBytes`); - } - }); - break; - case 'certificate': - if 
(b.verificationMaterial.content.certificate.rawBytes.length === 0) { - invalidValues.push('verificationMaterial.content.certificate.rawBytes'); - } - break; - } - } - if (b.verificationMaterial.tlogEntries === undefined) { - invalidValues.push('verificationMaterial.tlogEntries'); - } - else { - if (b.verificationMaterial.tlogEntries.length > 0) { - b.verificationMaterial.tlogEntries.forEach((entry, i) => { - if (entry.logId === undefined) { - invalidValues.push(`verificationMaterial.tlogEntries[${i}].logId`); - } - if (entry.kindVersion === undefined) { - invalidValues.push(`verificationMaterial.tlogEntries[${i}].kindVersion`); - } - }); - } - } - } - return invalidValues; -} -// Necessary for V01 bundles -function validateInclusionPromise(b) { - const invalidValues = []; - if (b.verificationMaterial && - b.verificationMaterial.tlogEntries?.length > 0) { - b.verificationMaterial.tlogEntries.forEach((entry, i) => { - if (entry.inclusionPromise === undefined) { - invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionPromise`); - } - }); - } - return invalidValues; -} -// Necessary for V02 and later bundles -function validateInclusionProof(b) { - const invalidValues = []; - if (b.verificationMaterial && - b.verificationMaterial.tlogEntries?.length > 0) { - b.verificationMaterial.tlogEntries.forEach((entry, i) => { - if (entry.inclusionProof === undefined) { - invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionProof`); - } - else { - if (entry.inclusionProof.checkpoint === undefined) { - invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionProof.checkpoint`); - } - } - }); - } - return invalidValues; -} -// Necessary for V03 and later bundles -function validateNoCertificateChain(b) { - const invalidValues = []; - if (b.verificationMaterial?.content?.$case === 'x509CertificateChain') { - invalidValues.push('verificationMaterial.content.$case'); - } - return invalidValues; -} diff --git a/node_modules/@sigstore/bundle/package.json b/node_modules/@sigstore/bundle/package.json deleted file mode 100644 index dd853897226d2..0000000000000 --- a/node_modules/@sigstore/bundle/package.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "name": "@sigstore/bundle", - "version": "2.3.2", - "description": "Sigstore bundle type", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "scripts": { - "clean": "shx rm -rf dist *.tsbuildinfo", - "build": "tsc --build", - "test": "jest" - }, - "files": [ - "dist", - "store" - ], - "author": "bdehamer@github.com", - "license": "Apache-2.0", - "repository": { - "type": "git", - "url": "git+https://github.com/sigstore/sigstore-js.git" - }, - "bugs": { - "url": "https://github.com/sigstore/sigstore-js/issues" - }, - "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/bundle#readme", - "publishConfig": { - "provenance": true - }, - "dependencies": { - "@sigstore/protobuf-specs": "^0.3.2" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } -} diff --git a/node_modules/@sigstore/core/LICENSE b/node_modules/@sigstore/core/LICENSE deleted file mode 100644 index e9e7c1679a09d..0000000000000 --- a/node_modules/@sigstore/core/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. 
- - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2023 The Sigstore Authors - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/node_modules/@sigstore/core/dist/asn1/error.js b/node_modules/@sigstore/core/dist/asn1/error.js deleted file mode 100644 index 17d93b0f7e706..0000000000000 --- a/node_modules/@sigstore/core/dist/asn1/error.js +++ /dev/null @@ -1,24 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.ASN1TypeError = exports.ASN1ParseError = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -class ASN1ParseError extends Error { -} -exports.ASN1ParseError = ASN1ParseError; -class ASN1TypeError extends Error { -} -exports.ASN1TypeError = ASN1TypeError; diff --git a/node_modules/@sigstore/core/dist/asn1/index.js b/node_modules/@sigstore/core/dist/asn1/index.js deleted file mode 100644 index 348b2ea4022e5..0000000000000 --- a/node_modules/@sigstore/core/dist/asn1/index.js +++ /dev/null @@ -1,20 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.ASN1Obj = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -var obj_1 = require("./obj"); -Object.defineProperty(exports, "ASN1Obj", { enumerable: true, get: function () { return obj_1.ASN1Obj; } }); diff --git a/node_modules/@sigstore/core/dist/asn1/length.js b/node_modules/@sigstore/core/dist/asn1/length.js deleted file mode 100644 index 36fdaf5b9777f..0000000000000 --- a/node_modules/@sigstore/core/dist/asn1/length.js +++ /dev/null @@ -1,63 +0,0 @@ -"use strict"; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.encodeLength = exports.decodeLength = void 0; -const error_1 = require("./error"); -// Decodes the length of a DER-encoded ANS.1 element from the supplied stream. 
-// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-encoded-length-and-value-bytes -function decodeLength(stream) { - const buf = stream.getUint8(); - // If the most significant bit is UNSET the length is just the value of the - // byte. - if ((buf & 0x80) === 0x00) { - return buf; - } - // Otherwise, the lower 7 bits of the first byte indicate the number of bytes - // that follow to encode the length. - const byteCount = buf & 0x7f; - // Ensure the encoded length can safely fit in a JS number. - if (byteCount > 6) { - throw new error_1.ASN1ParseError('length exceeds 6 byte limit'); - } - // Iterate over the bytes that encode the length. - let len = 0; - for (let i = 0; i < byteCount; i++) { - len = len * 256 + stream.getUint8(); - } - // This is a valid ASN.1 length encoding, but we don't support it. - if (len === 0) { - throw new error_1.ASN1ParseError('indefinite length encoding not supported'); - } - return len; -} -exports.decodeLength = decodeLength; -// Translates the supplied value to a DER-encoded length. -function encodeLength(len) { - if (len < 128) { - return Buffer.from([len]); - } - // Bitwise operations on large numbers are not supported in JS, so we need to - // use BigInts. - let val = BigInt(len); - const bytes = []; - while (val > 0n) { - bytes.unshift(Number(val & 255n)); - val = val >> 8n; - } - return Buffer.from([0x80 | bytes.length, ...bytes]); -} -exports.encodeLength = encodeLength; diff --git a/node_modules/@sigstore/core/dist/asn1/obj.js b/node_modules/@sigstore/core/dist/asn1/obj.js deleted file mode 100644 index 5f9ac9cdbc493..0000000000000 --- a/node_modules/@sigstore/core/dist/asn1/obj.js +++ /dev/null @@ -1,152 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.ASN1Obj = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const stream_1 = require("../stream"); -const error_1 = require("./error"); -const length_1 = require("./length"); -const parse_1 = require("./parse"); -const tag_1 = require("./tag"); -class ASN1Obj { - constructor(tag, value, subs) { - this.tag = tag; - this.value = value; - this.subs = subs; - } - // Constructs an ASN.1 object from a Buffer of DER-encoded bytes. - static parseBuffer(buf) { - return parseStream(new stream_1.ByteStream(buf)); - } - toDER() { - const valueStream = new stream_1.ByteStream(); - if (this.subs.length > 0) { - for (const sub of this.subs) { - valueStream.appendView(sub.toDER()); - } - } - else { - valueStream.appendView(this.value); - } - const value = valueStream.buffer; - // Concat tag/length/value - const obj = new stream_1.ByteStream(); - obj.appendChar(this.tag.toDER()); - obj.appendView((0, length_1.encodeLength)(value.length)); - obj.appendView(value); - return obj.buffer; - } - ///////////////////////////////////////////////////////////////////////////// - // Convenience methods for parsing ASN.1 primitives into JS types - // Returns the ASN.1 object's value as a boolean. 
Throws an error if the - // object is not a boolean. - toBoolean() { - if (!this.tag.isBoolean()) { - throw new error_1.ASN1TypeError('not a boolean'); - } - return (0, parse_1.parseBoolean)(this.value); - } - // Returns the ASN.1 object's value as a BigInt. Throws an error if the - // object is not an integer. - toInteger() { - if (!this.tag.isInteger()) { - throw new error_1.ASN1TypeError('not an integer'); - } - return (0, parse_1.parseInteger)(this.value); - } - // Returns the ASN.1 object's value as an OID string. Throws an error if the - // object is not an OID. - toOID() { - if (!this.tag.isOID()) { - throw new error_1.ASN1TypeError('not an OID'); - } - return (0, parse_1.parseOID)(this.value); - } - // Returns the ASN.1 object's value as a Date. Throws an error if the object - // is not either a UTCTime or a GeneralizedTime. - toDate() { - switch (true) { - case this.tag.isUTCTime(): - return (0, parse_1.parseTime)(this.value, true); - case this.tag.isGeneralizedTime(): - return (0, parse_1.parseTime)(this.value, false); - default: - throw new error_1.ASN1TypeError('not a date'); - } - } - // Returns the ASN.1 object's value as a number[] where each number is the - // value of a bit in the bit string. Throws an error if the object is not a - // bit string. - toBitString() { - if (!this.tag.isBitString()) { - throw new error_1.ASN1TypeError('not a bit string'); - } - return (0, parse_1.parseBitString)(this.value); - } -} -exports.ASN1Obj = ASN1Obj; -///////////////////////////////////////////////////////////////////////////// -// Internal stream parsing functions -function parseStream(stream) { - // Parse tag, length, and value from stream - const tag = new tag_1.ASN1Tag(stream.getUint8()); - const len = (0, length_1.decodeLength)(stream); - const value = stream.slice(stream.position, len); - const start = stream.position; - let subs = []; - // If the object is constructed, parse its children. Sometimes, children - // are embedded in OCTESTRING objects, so we need to check those - // for children as well. - if (tag.constructed) { - subs = collectSubs(stream, len); - } - else if (tag.isOctetString()) { - // Attempt to parse children of OCTETSTRING objects. If anything fails, - // assume the object is not constructed and treat as primitive. - try { - subs = collectSubs(stream, len); - } - catch (e) { - // Fail silently and treat as primitive - } - } - // If there are no children, move stream cursor to the end of the object - if (subs.length === 0) { - stream.seek(start + len); - } - return new ASN1Obj(tag, value, subs); -} -function collectSubs(stream, len) { - // Calculate end of object content - const end = stream.position + len; - // Make sure there are enough bytes left in the stream. This should never - // happen, cause it'll get caught when the stream is sliced in parseStream. - // Leaving as an extra check just in case. 
- /* istanbul ignore if */ - if (end > stream.length) { - throw new error_1.ASN1ParseError('invalid length'); - } - // Parse all children - const subs = []; - while (stream.position < end) { - subs.push(parseStream(stream)); - } - // When we're done parsing children, we should be at the end of the object - if (stream.position !== end) { - throw new error_1.ASN1ParseError('invalid length'); - } - return subs; -} diff --git a/node_modules/@sigstore/core/dist/asn1/parse.js b/node_modules/@sigstore/core/dist/asn1/parse.js deleted file mode 100644 index 482c7239e8316..0000000000000 --- a/node_modules/@sigstore/core/dist/asn1/parse.js +++ /dev/null @@ -1,125 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.parseBitString = exports.parseBoolean = exports.parseOID = exports.parseTime = exports.parseStringASCII = exports.parseInteger = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const RE_TIME_SHORT_YEAR = /^(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\.\d{3})?Z$/; -const RE_TIME_LONG_YEAR = /^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\.\d{3})?Z$/; -// Parse a BigInt from the DER-encoded buffer -// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-integer -function parseInteger(buf) { - let pos = 0; - const end = buf.length; - let val = buf[pos]; - const neg = val > 0x7f; - // Consume any padding bytes - const pad = neg ? 0xff : 0x00; - while (val == pad && ++pos < end) { - val = buf[pos]; - } - // Calculate remaining bytes to read - const len = end - pos; - if (len === 0) - return BigInt(neg ? -1 : 0); - // Handle two's complement for negative numbers - val = neg ? val - 256 : val; - // Parse remaining bytes - let n = BigInt(val); - for (let i = pos + 1; i < end; ++i) { - n = n * BigInt(256) + BigInt(buf[i]); - } - return n; -} -exports.parseInteger = parseInteger; -// Parse an ASCII string from the DER-encoded buffer -// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-basic-types#boolean -function parseStringASCII(buf) { - return buf.toString('ascii'); -} -exports.parseStringASCII = parseStringASCII; -// Parse a Date from the DER-encoded buffer -// https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.5.1 -function parseTime(buf, shortYear) { - const timeStr = parseStringASCII(buf); - // Parse the time string into matches - captured groups start at index 1 - const m = shortYear - ? RE_TIME_SHORT_YEAR.exec(timeStr) - : RE_TIME_LONG_YEAR.exec(timeStr); - if (!m) { - throw new Error('invalid time'); - } - // Translate dates with a 2-digit year to 4 digits per the spec - if (shortYear) { - let year = Number(m[1]); - year += year >= 50 ? 
1900 : 2000; - m[1] = year.toString(); - } - // Translate to ISO8601 format and parse - return new Date(`${m[1]}-${m[2]}-${m[3]}T${m[4]}:${m[5]}:${m[6]}Z`); -} -exports.parseTime = parseTime; -// Parse an OID from the DER-encoded buffer -// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-object-identifier -function parseOID(buf) { - let pos = 0; - const end = buf.length; - // Consume first byte which encodes the first two OID components - let n = buf[pos++]; - const first = Math.floor(n / 40); - const second = n % 40; - let oid = `${first}.${second}`; - // Consume remaining bytes - let val = 0; - for (; pos < end; ++pos) { - n = buf[pos]; - val = (val << 7) + (n & 0x7f); - // If the left-most bit is NOT set, then this is the last byte in the - // sequence and we can add the value to the OID and reset the accumulator - if ((n & 0x80) === 0) { - oid += `.${val}`; - val = 0; - } - } - return oid; -} -exports.parseOID = parseOID; -// Parse a boolean from the DER-encoded buffer -// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-basic-types#boolean -function parseBoolean(buf) { - return buf[0] !== 0; -} -exports.parseBoolean = parseBoolean; -// Parse a bit string from the DER-encoded buffer -// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-bit-string -function parseBitString(buf) { - // First byte tell us how many unused bits are in the last byte - const unused = buf[0]; - const start = 1; - const end = buf.length; - const bits = []; - for (let i = start; i < end; ++i) { - const byte = buf[i]; - // The skip value is only used for the last byte - const skip = i === end - 1 ? unused : 0; - // Iterate over each bit in the byte (most significant first) - for (let j = 7; j >= skip; --j) { - // Read the bit and add it to the bit string - bits.push((byte >> j) & 0x01); - } - } - return bits; -} -exports.parseBitString = parseBitString; diff --git a/node_modules/@sigstore/core/dist/asn1/tag.js b/node_modules/@sigstore/core/dist/asn1/tag.js deleted file mode 100644 index 84dd938d049aa..0000000000000 --- a/node_modules/@sigstore/core/dist/asn1/tag.js +++ /dev/null @@ -1,86 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.ASN1Tag = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -const error_1 = require("./error"); -const UNIVERSAL_TAG = { - BOOLEAN: 0x01, - INTEGER: 0x02, - BIT_STRING: 0x03, - OCTET_STRING: 0x04, - OBJECT_IDENTIFIER: 0x06, - SEQUENCE: 0x10, - SET: 0x11, - PRINTABLE_STRING: 0x13, - UTC_TIME: 0x17, - GENERALIZED_TIME: 0x18, -}; -const TAG_CLASS = { - UNIVERSAL: 0x00, - APPLICATION: 0x01, - CONTEXT_SPECIFIC: 0x02, - PRIVATE: 0x03, -}; -// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-encoded-tag-bytes -class ASN1Tag { - constructor(enc) { - // Bits 0 through 4 are the tag number - this.number = enc & 0x1f; - // Bit 5 is the constructed bit - this.constructed = (enc & 0x20) === 0x20; - // Bit 6 & 7 are the class - this.class = enc >> 6; - if (this.number === 0x1f) { - throw new error_1.ASN1ParseError('long form tags not supported'); - } - if (this.class === TAG_CLASS.UNIVERSAL && this.number === 0x00) { - throw new error_1.ASN1ParseError('unsupported tag 0x00'); - } - } - isUniversal() { - return this.class === TAG_CLASS.UNIVERSAL; - } - isContextSpecific(num) { - const res = this.class === TAG_CLASS.CONTEXT_SPECIFIC; - return num !== undefined ? res && this.number === num : res; - } - isBoolean() { - return this.isUniversal() && this.number === UNIVERSAL_TAG.BOOLEAN; - } - isInteger() { - return this.isUniversal() && this.number === UNIVERSAL_TAG.INTEGER; - } - isBitString() { - return this.isUniversal() && this.number === UNIVERSAL_TAG.BIT_STRING; - } - isOctetString() { - return this.isUniversal() && this.number === UNIVERSAL_TAG.OCTET_STRING; - } - isOID() { - return (this.isUniversal() && this.number === UNIVERSAL_TAG.OBJECT_IDENTIFIER); - } - isUTCTime() { - return this.isUniversal() && this.number === UNIVERSAL_TAG.UTC_TIME; - } - isGeneralizedTime() { - return this.isUniversal() && this.number === UNIVERSAL_TAG.GENERALIZED_TIME; - } - toDER() { - return this.number | (this.constructed ? 0x20 : 0x00) | (this.class << 6); - } -} -exports.ASN1Tag = ASN1Tag; diff --git a/node_modules/@sigstore/core/dist/crypto.js b/node_modules/@sigstore/core/dist/crypto.js deleted file mode 100644 index dbe65b165d357..0000000000000 --- a/node_modules/@sigstore/core/dist/crypto.js +++ /dev/null @@ -1,71 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.bufferEqual = exports.verify = exports.hash = exports.digest = exports.createPublicKey = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -const crypto_1 = __importDefault(require("crypto")); -const SHA256_ALGORITHM = 'sha256'; -function createPublicKey(key, type = 'spki') { - if (typeof key === 'string') { - return crypto_1.default.createPublicKey(key); - } - else { - return crypto_1.default.createPublicKey({ key, format: 'der', type: type }); - } -} -exports.createPublicKey = createPublicKey; -function digest(algorithm, ...data) { - const hash = crypto_1.default.createHash(algorithm); - for (const d of data) { - hash.update(d); - } - return hash.digest(); -} -exports.digest = digest; -// TODO: deprecate this in favor of digest() -function hash(...data) { - const hash = crypto_1.default.createHash(SHA256_ALGORITHM); - for (const d of data) { - hash.update(d); - } - return hash.digest(); -} -exports.hash = hash; -function verify(data, key, signature, algorithm) { - // The try/catch is to work around an issue in Node 14.x where verify throws - // an error in some scenarios if the signature is invalid. - try { - return crypto_1.default.verify(algorithm, data, key, signature); - } - catch (e) { - /* istanbul ignore next */ - return false; - } -} -exports.verify = verify; -function bufferEqual(a, b) { - try { - return crypto_1.default.timingSafeEqual(a, b); - } - catch { - /* istanbul ignore next */ - return false; - } -} -exports.bufferEqual = bufferEqual; diff --git a/node_modules/@sigstore/core/dist/dsse.js b/node_modules/@sigstore/core/dist/dsse.js deleted file mode 100644 index a78783c919a25..0000000000000 --- a/node_modules/@sigstore/core/dist/dsse.js +++ /dev/null @@ -1,31 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.preAuthEncoding = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const PAE_PREFIX = 'DSSEv1'; -// DSSE Pre-Authentication Encoding -function preAuthEncoding(payloadType, payload) { - const prefix = [ - PAE_PREFIX, - payloadType.length, - payloadType, - payload.length, - '', - ].join(' '); - return Buffer.concat([Buffer.from(prefix, 'ascii'), payload]); -} -exports.preAuthEncoding = preAuthEncoding; diff --git a/node_modules/@sigstore/core/dist/encoding.js b/node_modules/@sigstore/core/dist/encoding.js deleted file mode 100644 index b020ac4d6ecd4..0000000000000 --- a/node_modules/@sigstore/core/dist/encoding.js +++ /dev/null @@ -1,28 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.base64Decode = exports.base64Encode = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-See the License for the specific language governing permissions and -limitations under the License. -*/ -const BASE64_ENCODING = 'base64'; -const UTF8_ENCODING = 'utf-8'; -function base64Encode(str) { - return Buffer.from(str, UTF8_ENCODING).toString(BASE64_ENCODING); -} -exports.base64Encode = base64Encode; -function base64Decode(str) { - return Buffer.from(str, BASE64_ENCODING).toString(UTF8_ENCODING); -} -exports.base64Decode = base64Decode; diff --git a/node_modules/@sigstore/core/dist/index.js b/node_modules/@sigstore/core/dist/index.js deleted file mode 100644 index ac35e86a8df7d..0000000000000 --- a/node_modules/@sigstore/core/dist/index.js +++ /dev/null @@ -1,56 +0,0 @@ -"use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.X509SCTExtension = exports.X509Certificate = exports.EXTENSION_OID_SCT = exports.ByteStream = exports.RFC3161Timestamp = exports.pem = exports.json = exports.encoding = exports.dsse = exports.crypto = exports.ASN1Obj = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -var asn1_1 = require("./asn1"); -Object.defineProperty(exports, "ASN1Obj", { enumerable: true, get: function () { return asn1_1.ASN1Obj; } }); -exports.crypto = __importStar(require("./crypto")); -exports.dsse = __importStar(require("./dsse")); -exports.encoding = __importStar(require("./encoding")); -exports.json = __importStar(require("./json")); -exports.pem = __importStar(require("./pem")); -var rfc3161_1 = require("./rfc3161"); -Object.defineProperty(exports, "RFC3161Timestamp", { enumerable: true, get: function () { return rfc3161_1.RFC3161Timestamp; } }); -var stream_1 = require("./stream"); -Object.defineProperty(exports, "ByteStream", { enumerable: true, get: function () { return stream_1.ByteStream; } }); -var x509_1 = require("./x509"); -Object.defineProperty(exports, "EXTENSION_OID_SCT", { enumerable: true, get: function () { return x509_1.EXTENSION_OID_SCT; } }); -Object.defineProperty(exports, "X509Certificate", { enumerable: true, get: function () { return x509_1.X509Certificate; } }); -Object.defineProperty(exports, "X509SCTExtension", { enumerable: true, get: function () { return x509_1.X509SCTExtension; } }); diff --git a/node_modules/@sigstore/core/dist/json.js b/node_modules/@sigstore/core/dist/json.js deleted file mode 100644 index a50df7233c7c5..0000000000000 --- a/node_modules/@sigstore/core/dist/json.js +++ /dev/null @@ -1,61 +0,0 @@ -"use strict"; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.canonicalize = void 0; -// JSON canonicalization per https://github.com/cyberphone/json-canonicalization -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function canonicalize(object) { - let buffer = ''; - if (object === null || typeof object !== 'object' || object.toJSON != null) { - // Primitives or toJSONable objects - buffer += JSON.stringify(object); - } - else if (Array.isArray(object)) { - // Array - maintain element order - buffer += '['; - let first = true; - object.forEach((element) => { - if (!first) { - buffer += ','; - } - first = false; - // recursive call - buffer += canonicalize(element); - }); - buffer += ']'; - } - else { - // Object - Sort properties before serializing - buffer += '{'; - let first = true; - Object.keys(object) - .sort() - .forEach((property) => { - if (!first) { - buffer += ','; - } - first = false; - buffer += JSON.stringify(property); - buffer += ':'; - // recursive call - buffer += canonicalize(object[property]); - }); - buffer += '}'; - } - return buffer; -} -exports.canonicalize = canonicalize; diff --git a/node_modules/@sigstore/core/dist/oid.js b/node_modules/@sigstore/core/dist/oid.js deleted file mode 100644 index ac7a643067ad0..0000000000000 --- a/node_modules/@sigstore/core/dist/oid.js +++ /dev/null @@ -1,14 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.SHA2_HASH_ALGOS = exports.ECDSA_SIGNATURE_ALGOS = void 0; -exports.ECDSA_SIGNATURE_ALGOS = { - '1.2.840.10045.4.3.1': 'sha224', - '1.2.840.10045.4.3.2': 'sha256', - '1.2.840.10045.4.3.3': 'sha384', - '1.2.840.10045.4.3.4': 'sha512', -}; -exports.SHA2_HASH_ALGOS = { - '2.16.840.1.101.3.4.2.1': 'sha256', - '2.16.840.1.101.3.4.2.2': 'sha384', - '2.16.840.1.101.3.4.2.3': 'sha512', -}; diff --git a/node_modules/@sigstore/core/dist/pem.js b/node_modules/@sigstore/core/dist/pem.js deleted file mode 100644 index f35bc3835bbd1..0000000000000 --- a/node_modules/@sigstore/core/dist/pem.js +++ /dev/null @@ -1,44 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.fromDER = exports.toDER = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const PEM_HEADER = /-----BEGIN (.*)-----/; -const PEM_FOOTER = /-----END (.*)-----/; -function toDER(certificate) { - let der = ''; - certificate.split('\n').forEach((line) => { - if (line.match(PEM_HEADER) || line.match(PEM_FOOTER)) { - return; - } - der += line; - }); - return Buffer.from(der, 'base64'); -} -exports.toDER = toDER; -// Translates a DER-encoded buffer into a PEM-encoded string. Standard PEM -// encoding dictates that each certificate should have a trailing newline after -// the footer. -function fromDER(certificate, type = 'CERTIFICATE') { - // Base64-encode the certificate. - const der = certificate.toString('base64'); - // Split the certificate into lines of 64 characters. 
- const lines = der.match(/.{1,64}/g) || ''; - return [`-----BEGIN ${type}-----`, ...lines, `-----END ${type}-----`] - .join('\n') - .concat('\n'); -} -exports.fromDER = fromDER; diff --git a/node_modules/@sigstore/core/dist/rfc3161/error.js b/node_modules/@sigstore/core/dist/rfc3161/error.js deleted file mode 100644 index b9b549b0bb323..0000000000000 --- a/node_modules/@sigstore/core/dist/rfc3161/error.js +++ /dev/null @@ -1,21 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.RFC3161TimestampVerificationError = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -class RFC3161TimestampVerificationError extends Error { -} -exports.RFC3161TimestampVerificationError = RFC3161TimestampVerificationError; diff --git a/node_modules/@sigstore/core/dist/rfc3161/index.js b/node_modules/@sigstore/core/dist/rfc3161/index.js deleted file mode 100644 index b77ecf1c7d50c..0000000000000 --- a/node_modules/@sigstore/core/dist/rfc3161/index.js +++ /dev/null @@ -1,20 +0,0 @@ -"use strict"; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.RFC3161Timestamp = void 0; -var timestamp_1 = require("./timestamp"); -Object.defineProperty(exports, "RFC3161Timestamp", { enumerable: true, get: function () { return timestamp_1.RFC3161Timestamp; } }); diff --git a/node_modules/@sigstore/core/dist/rfc3161/timestamp.js b/node_modules/@sigstore/core/dist/rfc3161/timestamp.js deleted file mode 100644 index 3e61fc1a4e169..0000000000000 --- a/node_modules/@sigstore/core/dist/rfc3161/timestamp.js +++ /dev/null @@ -1,201 +0,0 @@ -"use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.RFC3161Timestamp = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const asn1_1 = require("../asn1"); -const crypto = __importStar(require("../crypto")); -const oid_1 = require("../oid"); -const error_1 = require("./error"); -const tstinfo_1 = require("./tstinfo"); -const OID_PKCS9_CONTENT_TYPE_SIGNED_DATA = '1.2.840.113549.1.7.2'; -const OID_PKCS9_CONTENT_TYPE_TSTINFO = '1.2.840.113549.1.9.16.1.4'; -const OID_PKCS9_MESSAGE_DIGEST_KEY = '1.2.840.113549.1.9.4'; -class RFC3161Timestamp { - constructor(asn1) { - this.root = asn1; - } - static parse(der) { - const asn1 = asn1_1.ASN1Obj.parseBuffer(der); - return new RFC3161Timestamp(asn1); - } - get status() { - return this.pkiStatusInfoObj.subs[0].toInteger(); - } - get contentType() { - return this.contentTypeObj.toOID(); - } - get eContentType() { - return this.eContentTypeObj.toOID(); - } - get signingTime() { - return this.tstInfo.genTime; - } - get signerIssuer() { - return this.signerSidObj.subs[0].value; - } - get signerSerialNumber() { - return this.signerSidObj.subs[1].value; - } - get signerDigestAlgorithm() { - const oid = this.signerDigestAlgorithmObj.subs[0].toOID(); - return oid_1.SHA2_HASH_ALGOS[oid]; - } - get signatureAlgorithm() { - const oid = this.signatureAlgorithmObj.subs[0].toOID(); - return oid_1.ECDSA_SIGNATURE_ALGOS[oid]; - } - get signatureValue() { - return this.signatureValueObj.value; - } - get tstInfo() { - // Need to unpack tstInfo from an OCTET STRING - return new tstinfo_1.TSTInfo(this.eContentObj.subs[0].subs[0]); - } - verify(data, publicKey) { - if (!this.timeStampTokenObj) { - throw new error_1.RFC3161TimestampVerificationError('timeStampToken is missing'); - } - // Check for expected ContentInfo content type - if (this.contentType !== OID_PKCS9_CONTENT_TYPE_SIGNED_DATA) { - throw new error_1.RFC3161TimestampVerificationError(`incorrect content type: ${this.contentType}`); - } - // Check for expected encapsulated content type - if (this.eContentType !== OID_PKCS9_CONTENT_TYPE_TSTINFO) { - throw new error_1.RFC3161TimestampVerificationError(`incorrect encapsulated content type: ${this.eContentType}`); - } - // Check that the tstInfo references the correct artifact - this.tstInfo.verify(data); - // Check that the signed message digest matches the tstInfo - this.verifyMessageDigest(); - // Check that the signature is valid for the signed attributes - this.verifySignature(publicKey); - } - verifyMessageDigest() { - // Check that the tstInfo matches the 
signed data - const tstInfoDigest = crypto.digest(this.signerDigestAlgorithm, this.tstInfo.raw); - const expectedDigest = this.messageDigestAttributeObj.subs[1].subs[0].value; - if (!crypto.bufferEqual(tstInfoDigest, expectedDigest)) { - throw new error_1.RFC3161TimestampVerificationError('signed data does not match tstInfo'); - } - } - verifySignature(key) { - // Encode the signed attributes for verification - const signedAttrs = this.signedAttrsObj.toDER(); - signedAttrs[0] = 0x31; // Change context-specific tag to SET - // Check that the signature is valid for the signed attributes - const verified = crypto.verify(signedAttrs, key, this.signatureValue, this.signatureAlgorithm); - if (!verified) { - throw new error_1.RFC3161TimestampVerificationError('signature verification failed'); - } - } - // https://www.rfc-editor.org/rfc/rfc3161#section-2.4.2 - get pkiStatusInfoObj() { - // pkiStatusInfo is the first element of the timestamp response sequence - return this.root.subs[0]; - } - // https://www.rfc-editor.org/rfc/rfc3161#section-2.4.2 - get timeStampTokenObj() { - // timeStampToken is the first element of the timestamp response sequence - return this.root.subs[1]; - } - // https://datatracker.ietf.org/doc/html/rfc5652#section-3 - get contentTypeObj() { - return this.timeStampTokenObj.subs[0]; - } - // https://www.rfc-editor.org/rfc/rfc5652#section-3 - get signedDataObj() { - const obj = this.timeStampTokenObj.subs.find((sub) => sub.tag.isContextSpecific(0x00)); - return obj.subs[0]; - } - // https://datatracker.ietf.org/doc/html/rfc5652#section-5.1 - get encapContentInfoObj() { - return this.signedDataObj.subs[2]; - } - // https://datatracker.ietf.org/doc/html/rfc5652#section-5.1 - get signerInfosObj() { - // SignerInfos is the last element of the signed data sequence - const sd = this.signedDataObj; - return sd.subs[sd.subs.length - 1]; - } - // https://www.rfc-editor.org/rfc/rfc5652#section-5.1 - get signerInfoObj() { - // Only supporting one signer - return this.signerInfosObj.subs[0]; - } - // https://datatracker.ietf.org/doc/html/rfc5652#section-5.2 - get eContentTypeObj() { - return this.encapContentInfoObj.subs[0]; - } - // https://datatracker.ietf.org/doc/html/rfc5652#section-5.2 - get eContentObj() { - return this.encapContentInfoObj.subs[1]; - } - // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3 - get signedAttrsObj() { - const signedAttrs = this.signerInfoObj.subs.find((sub) => sub.tag.isContextSpecific(0x00)); - return signedAttrs; - } - // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3 - get messageDigestAttributeObj() { - const messageDigest = this.signedAttrsObj.subs.find((sub) => sub.subs[0].tag.isOID() && - sub.subs[0].toOID() === OID_PKCS9_MESSAGE_DIGEST_KEY); - return messageDigest; - } - // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3 - get signerSidObj() { - return this.signerInfoObj.subs[1]; - } - // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3 - get signerDigestAlgorithmObj() { - // Signature is the 2nd element of the signerInfoObj object - return this.signerInfoObj.subs[2]; - } - // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3 - get signatureAlgorithmObj() { - // Signature is the 4th element of the signerInfoObj object - return this.signerInfoObj.subs[4]; - } - // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3 - get signatureValueObj() { - // Signature is the 6th element of the signerInfoObj object - return this.signerInfoObj.subs[5]; - } -} -exports.RFC3161Timestamp = RFC3161Timestamp; 
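For orientation while reading these removals: the simplest of the @sigstore/core helpers deleted in this patch is the DSSE pre-authentication encoding from dsse.js above. Below is a minimal standalone sketch mirroring that preAuthEncoding() helper, included here for illustration only; it is not part of the patch, and the sample payload and media type are hypothetical.

'use strict'

// Minimal standalone sketch of DSSE Pre-Authentication Encoding (PAE),
// mirroring the preAuthEncoding() helper removed above. Illustration only.
const PAE_PREFIX = 'DSSEv1'

// PAE(type, body) = "DSSEv1" SP LEN(type) SP type SP LEN(body) SP body
function preAuthEncoding (payloadType, payload) {
  const prefix = [
    PAE_PREFIX,
    payloadType.length,
    payloadType,
    payload.length,
    '', // trailing separator before the raw payload bytes
  ].join(' ')
  return Buffer.concat([Buffer.from(prefix, 'ascii'), payload])
}

// Example usage with a hypothetical in-toto payload:
const payload = Buffer.from('{"foo":"bar"}', 'utf8')
console.log(preAuthEncoding('application/vnd.in-toto+json', payload).toString())
// -> DSSEv1 28 application/vnd.in-toto+json 13 {"foo":"bar"}
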
diff --git a/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js b/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js deleted file mode 100644 index dc8e4fb339383..0000000000000 --- a/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js +++ /dev/null @@ -1,61 +0,0 @@ -"use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.TSTInfo = void 0; -const crypto = __importStar(require("../crypto")); -const oid_1 = require("../oid"); -const error_1 = require("./error"); -class TSTInfo { - constructor(asn1) { - this.root = asn1; - } - get version() { - return this.root.subs[0].toInteger(); - } - get genTime() { - return this.root.subs[4].toDate(); - } - get messageImprintHashAlgorithm() { - const oid = this.messageImprintObj.subs[0].subs[0].toOID(); - return oid_1.SHA2_HASH_ALGOS[oid]; - } - get messageImprintHashedMessage() { - return this.messageImprintObj.subs[1].value; - } - get raw() { - return this.root.toDER(); - } - verify(data) { - const digest = crypto.digest(this.messageImprintHashAlgorithm, data); - if (!crypto.bufferEqual(digest, this.messageImprintHashedMessage)) { - throw new error_1.RFC3161TimestampVerificationError('message imprint does not match artifact'); - } - } - // https://www.rfc-editor.org/rfc/rfc3161#section-2.4.2 - get messageImprintObj() { - return this.root.subs[2]; - } -} -exports.TSTInfo = TSTInfo; diff --git a/node_modules/@sigstore/core/dist/stream.js b/node_modules/@sigstore/core/dist/stream.js deleted file mode 100644 index 0a24f8582eb23..0000000000000 --- a/node_modules/@sigstore/core/dist/stream.js +++ /dev/null @@ -1,115 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.ByteStream = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -class StreamError extends Error { -} -class ByteStream { - constructor(buffer) { - this.start = 0; - if (buffer) { - this.buf = buffer; - this.view = Buffer.from(buffer); - } - else { - this.buf = new ArrayBuffer(0); - this.view = Buffer.from(this.buf); - } - } - get buffer() { - return this.view.subarray(0, this.start); - } - get length() { - return this.view.byteLength; - } - get position() { - return this.start; - } - seek(position) { - this.start = position; - } - // Returns a Buffer containing the specified number of bytes starting at the - // given start position. - slice(start, len) { - const end = start + len; - if (end > this.length) { - throw new StreamError('request past end of buffer'); - } - return this.view.subarray(start, end); - } - appendChar(char) { - this.ensureCapacity(1); - this.view[this.start] = char; - this.start += 1; - } - appendUint16(num) { - this.ensureCapacity(2); - const value = new Uint16Array([num]); - const view = new Uint8Array(value.buffer); - this.view[this.start] = view[1]; - this.view[this.start + 1] = view[0]; - this.start += 2; - } - appendUint24(num) { - this.ensureCapacity(3); - const value = new Uint32Array([num]); - const view = new Uint8Array(value.buffer); - this.view[this.start] = view[2]; - this.view[this.start + 1] = view[1]; - this.view[this.start + 2] = view[0]; - this.start += 3; - } - appendView(view) { - this.ensureCapacity(view.length); - this.view.set(view, this.start); - this.start += view.length; - } - getBlock(size) { - if (size <= 0) { - return Buffer.alloc(0); - } - if (this.start + size > this.view.length) { - throw new Error('request past end of buffer'); - } - const result = this.view.subarray(this.start, this.start + size); - this.start += size; - return result; - } - getUint8() { - return this.getBlock(1)[0]; - } - getUint16() { - const block = this.getBlock(2); - return (block[0] << 8) | block[1]; - } - ensureCapacity(size) { - if (this.start + size > this.view.byteLength) { - const blockSize = ByteStream.BLOCK_SIZE + (size > ByteStream.BLOCK_SIZE ? size : 0); - this.realloc(this.view.byteLength + blockSize); - } - } - realloc(size) { - const newArray = new ArrayBuffer(size); - const newView = Buffer.from(newArray); - // Copy the old buffer into the new one - newView.set(this.view); - this.buf = newArray; - this.view = newView; - } -} -exports.ByteStream = ByteStream; -ByteStream.BLOCK_SIZE = 1024; diff --git a/node_modules/@sigstore/core/dist/x509/cert.js b/node_modules/@sigstore/core/dist/x509/cert.js deleted file mode 100644 index 16c0c40d858d8..0000000000000 --- a/node_modules/@sigstore/core/dist/x509/cert.js +++ /dev/null @@ -1,226 +0,0 @@ -"use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.X509Certificate = exports.EXTENSION_OID_SCT = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const asn1_1 = require("../asn1"); -const crypto = __importStar(require("../crypto")); -const oid_1 = require("../oid"); -const pem = __importStar(require("../pem")); -const ext_1 = require("./ext"); -const EXTENSION_OID_SUBJECT_KEY_ID = '2.5.29.14'; -const EXTENSION_OID_KEY_USAGE = '2.5.29.15'; -const EXTENSION_OID_SUBJECT_ALT_NAME = '2.5.29.17'; -const EXTENSION_OID_BASIC_CONSTRAINTS = '2.5.29.19'; -const EXTENSION_OID_AUTHORITY_KEY_ID = '2.5.29.35'; -exports.EXTENSION_OID_SCT = '1.3.6.1.4.1.11129.2.4.2'; -class X509Certificate { - constructor(asn1) { - this.root = asn1; - } - static parse(cert) { - const der = typeof cert === 'string' ? pem.toDER(cert) : cert; - const asn1 = asn1_1.ASN1Obj.parseBuffer(der); - return new X509Certificate(asn1); - } - get tbsCertificate() { - return this.tbsCertificateObj; - } - get version() { - // version number is the first element of the version context specific tag - const ver = this.versionObj.subs[0].toInteger(); - return `v${(ver + BigInt(1)).toString()}`; - } - get serialNumber() { - return this.serialNumberObj.value; - } - get notBefore() { - // notBefore is the first element of the validity sequence - return this.validityObj.subs[0].toDate(); - } - get notAfter() { - // notAfter is the second element of the validity sequence - return this.validityObj.subs[1].toDate(); - } - get issuer() { - return this.issuerObj.value; - } - get subject() { - return this.subjectObj.value; - } - get publicKey() { - return this.subjectPublicKeyInfoObj.toDER(); - } - get signatureAlgorithm() { - const oid = this.signatureAlgorithmObj.subs[0].toOID(); - return oid_1.ECDSA_SIGNATURE_ALGOS[oid]; - } - get signatureValue() { - // Signature value is a bit string, so we need to skip the first byte - return this.signatureValueObj.value.subarray(1); - } - get subjectAltName() { - const ext = this.extSubjectAltName; - return ext?.uri || ext?.rfc822Name; - } - get extensions() { - // The extension list is the first (and only) element of the extensions - // context specific tag - const extSeq = this.extensionsObj?.subs[0]; - return extSeq?.subs || /* istanbul ignore next */ []; - } - get extKeyUsage() { - const ext = this.findExtension(EXTENSION_OID_KEY_USAGE); - return ext ? 
new ext_1.X509KeyUsageExtension(ext) : undefined; - } - get extBasicConstraints() { - const ext = this.findExtension(EXTENSION_OID_BASIC_CONSTRAINTS); - return ext ? new ext_1.X509BasicConstraintsExtension(ext) : undefined; - } - get extSubjectAltName() { - const ext = this.findExtension(EXTENSION_OID_SUBJECT_ALT_NAME); - return ext ? new ext_1.X509SubjectAlternativeNameExtension(ext) : undefined; - } - get extAuthorityKeyID() { - const ext = this.findExtension(EXTENSION_OID_AUTHORITY_KEY_ID); - return ext ? new ext_1.X509AuthorityKeyIDExtension(ext) : undefined; - } - get extSubjectKeyID() { - const ext = this.findExtension(EXTENSION_OID_SUBJECT_KEY_ID); - return ext - ? new ext_1.X509SubjectKeyIDExtension(ext) - : /* istanbul ignore next */ undefined; - } - get extSCT() { - const ext = this.findExtension(exports.EXTENSION_OID_SCT); - return ext ? new ext_1.X509SCTExtension(ext) : undefined; - } - get isCA() { - const ca = this.extBasicConstraints?.isCA || false; - // If the KeyUsage extension is present, keyCertSign must be set - if (this.extKeyUsage) { - ca && this.extKeyUsage.keyCertSign; - } - return ca; - } - extension(oid) { - const ext = this.findExtension(oid); - return ext ? new ext_1.X509Extension(ext) : undefined; - } - verify(issuerCertificate) { - // Use the issuer's public key if provided, otherwise use the subject's - const publicKey = issuerCertificate?.publicKey || this.publicKey; - const key = crypto.createPublicKey(publicKey); - return crypto.verify(this.tbsCertificate.toDER(), key, this.signatureValue, this.signatureAlgorithm); - } - validForDate(date) { - return this.notBefore <= date && date <= this.notAfter; - } - equals(other) { - return this.root.toDER().equals(other.root.toDER()); - } - // Creates a copy of the certificate with a new buffer - clone() { - const der = this.root.toDER(); - const clone = Buffer.alloc(der.length); - der.copy(clone); - return X509Certificate.parse(clone); - } - findExtension(oid) { - // Find the extension with the given OID. 
The OID will always be the first - // element of the extension sequence - return this.extensions.find((ext) => ext.subs[0].toOID() === oid); - } - ///////////////////////////////////////////////////////////////////////////// - // The following properties use the documented x509 structure to locate the - // desired ASN.1 object - // https://www.rfc-editor.org/rfc/rfc5280#section-4.1 - // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.1.1 - get tbsCertificateObj() { - // tbsCertificate is the first element of the certificate sequence - return this.root.subs[0]; - } - // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.1.2 - get signatureAlgorithmObj() { - // signatureAlgorithm is the second element of the certificate sequence - return this.root.subs[1]; - } - // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.1.3 - get signatureValueObj() { - // signatureValue is the third element of the certificate sequence - return this.root.subs[2]; - } - // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.1 - get versionObj() { - // version is the first element of the tbsCertificate sequence - return this.tbsCertificateObj.subs[0]; - } - // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.2 - get serialNumberObj() { - // serialNumber is the second element of the tbsCertificate sequence - return this.tbsCertificateObj.subs[1]; - } - // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.4 - get issuerObj() { - // issuer is the fourth element of the tbsCertificate sequence - return this.tbsCertificateObj.subs[3]; - } - // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.5 - get validityObj() { - // version is the fifth element of the tbsCertificate sequence - return this.tbsCertificateObj.subs[4]; - } - // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.6 - get subjectObj() { - // subject is the sixth element of the tbsCertificate sequence - return this.tbsCertificateObj.subs[5]; - } - // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.7 - get subjectPublicKeyInfoObj() { - // subjectPublicKeyInfo is the seventh element of the tbsCertificate sequence - return this.tbsCertificateObj.subs[6]; - } - // Extensions can't be located by index because their position varies. Instead, - // we need to find the extensions context specific tag - // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.9 - get extensionsObj() { - return this.tbsCertificateObj.subs.find((sub) => sub.tag.isContextSpecific(0x03)); - } -} -exports.X509Certificate = X509Certificate; diff --git a/node_modules/@sigstore/core/dist/x509/ext.js b/node_modules/@sigstore/core/dist/x509/ext.js deleted file mode 100644 index 1d481261b0aa6..0000000000000 --- a/node_modules/@sigstore/core/dist/x509/ext.js +++ /dev/null @@ -1,145 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.X509SCTExtension = exports.X509SubjectKeyIDExtension = exports.X509AuthorityKeyIDExtension = exports.X509SubjectAlternativeNameExtension = exports.X509KeyUsageExtension = exports.X509BasicConstraintsExtension = exports.X509Extension = void 0; -const stream_1 = require("../stream"); -const sct_1 = require("./sct"); -// https://www.rfc-editor.org/rfc/rfc5280#section-4.1 -class X509Extension { - constructor(asn1) { - this.root = asn1; - } - get oid() { - return this.root.subs[0].toOID(); - } - get critical() { - // The critical field is optional and will be the second element of the - // extension sequence if present. Default to false if not present. - return this.root.subs.length === 3 ? 
this.root.subs[1].toBoolean() : false; - } - get value() { - return this.extnValueObj.value; - } - get valueObj() { - return this.extnValueObj; - } - get extnValueObj() { - // The extnValue field will be the last element of the extension sequence - return this.root.subs[this.root.subs.length - 1]; - } -} -exports.X509Extension = X509Extension; -// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.9 -class X509BasicConstraintsExtension extends X509Extension { - get isCA() { - return this.sequence.subs[0]?.toBoolean() ?? false; - } - get pathLenConstraint() { - return this.sequence.subs.length > 1 - ? this.sequence.subs[1].toInteger() - : undefined; - } - // The extnValue field contains a single sequence wrapping the isCA and - // pathLenConstraint. - get sequence() { - return this.extnValueObj.subs[0]; - } -} -exports.X509BasicConstraintsExtension = X509BasicConstraintsExtension; -// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.3 -class X509KeyUsageExtension extends X509Extension { - get digitalSignature() { - return this.bitString[0] === 1; - } - get keyCertSign() { - return this.bitString[5] === 1; - } - get crlSign() { - return this.bitString[6] === 1; - } - // The extnValue field contains a single bit string which is a bit mask - // indicating which key usages are enabled. - get bitString() { - return this.extnValueObj.subs[0].toBitString(); - } -} -exports.X509KeyUsageExtension = X509KeyUsageExtension; -// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.6 -class X509SubjectAlternativeNameExtension extends X509Extension { - get rfc822Name() { - return this.findGeneralName(0x01)?.value.toString('ascii'); - } - get uri() { - return this.findGeneralName(0x06)?.value.toString('ascii'); - } - // Retrieve the value of an otherName with the given OID. - otherName(oid) { - const otherName = this.findGeneralName(0x00); - if (otherName === undefined) { - return undefined; - } - // The otherName is a sequence containing an OID and a value. - // Need to check that the OID matches the one we're looking for. - const otherNameOID = otherName.subs[0].toOID(); - if (otherNameOID !== oid) { - return undefined; - } - // The otherNameValue is a sequence containing the actual value. - const otherNameValue = otherName.subs[1]; - return otherNameValue.subs[0].value.toString('ascii'); - } - findGeneralName(tag) { - return this.generalNames.find((gn) => gn.tag.isContextSpecific(tag)); - } - // The extnValue field contains a sequence of GeneralNames. 
- get generalNames() { - return this.extnValueObj.subs[0].subs; - } -} -exports.X509SubjectAlternativeNameExtension = X509SubjectAlternativeNameExtension; -// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.1 -class X509AuthorityKeyIDExtension extends X509Extension { - get keyIdentifier() { - return this.findSequenceMember(0x00)?.value; - } - findSequenceMember(tag) { - return this.sequence.subs.find((el) => el.tag.isContextSpecific(tag)); - } - // The extnValue field contains a single sequence wrapping the keyIdentifier - get sequence() { - return this.extnValueObj.subs[0]; - } -} -exports.X509AuthorityKeyIDExtension = X509AuthorityKeyIDExtension; -// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.2 -class X509SubjectKeyIDExtension extends X509Extension { - get keyIdentifier() { - return this.extnValueObj.subs[0].value; - } -} -exports.X509SubjectKeyIDExtension = X509SubjectKeyIDExtension; -// https://www.rfc-editor.org/rfc/rfc6962#section-3.3 -class X509SCTExtension extends X509Extension { - constructor(asn1) { - super(asn1); - } - get signedCertificateTimestamps() { - const buf = this.extnValueObj.subs[0].value; - const stream = new stream_1.ByteStream(buf); - // The overall list length is encoded in the first two bytes -- note this - // is the length of the list in bytes, NOT the number of SCTs in the list - const end = stream.getUint16() + 2; - const sctList = []; - while (stream.position < end) { - // Read the length of the next SCT - const sctLength = stream.getUint16(); - // Slice out the bytes for the next SCT and parse it - const sct = stream.getBlock(sctLength); - sctList.push(sct_1.SignedCertificateTimestamp.parse(sct)); - } - if (stream.position !== end) { - throw new Error('SCT list length does not match actual length'); - } - return sctList; - } -} -exports.X509SCTExtension = X509SCTExtension; diff --git a/node_modules/@sigstore/core/dist/x509/index.js b/node_modules/@sigstore/core/dist/x509/index.js deleted file mode 100644 index cdd77e58f37d5..0000000000000 --- a/node_modules/@sigstore/core/dist/x509/index.js +++ /dev/null @@ -1,23 +0,0 @@ -"use strict"; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.X509SCTExtension = exports.X509Certificate = exports.EXTENSION_OID_SCT = void 0; -var cert_1 = require("./cert"); -Object.defineProperty(exports, "EXTENSION_OID_SCT", { enumerable: true, get: function () { return cert_1.EXTENSION_OID_SCT; } }); -Object.defineProperty(exports, "X509Certificate", { enumerable: true, get: function () { return cert_1.X509Certificate; } }); -var ext_1 = require("./ext"); -Object.defineProperty(exports, "X509SCTExtension", { enumerable: true, get: function () { return ext_1.X509SCTExtension; } }); diff --git a/node_modules/@sigstore/core/dist/x509/sct.js b/node_modules/@sigstore/core/dist/x509/sct.js deleted file mode 100644 index 1603059c0d1ac..0000000000000 --- a/node_modules/@sigstore/core/dist/x509/sct.js +++ /dev/null @@ -1,141 +0,0 @@ -"use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.SignedCertificateTimestamp = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const crypto = __importStar(require("../crypto")); -const stream_1 = require("../stream"); -class SignedCertificateTimestamp { - constructor(options) { - this.version = options.version; - this.logID = options.logID; - this.timestamp = options.timestamp; - this.extensions = options.extensions; - this.hashAlgorithm = options.hashAlgorithm; - this.signatureAlgorithm = options.signatureAlgorithm; - this.signature = options.signature; - } - get datetime() { - return new Date(Number(this.timestamp.readBigInt64BE())); - } - // Returns the hash algorithm used to generate the SCT's signature. 
- // https://www.rfc-editor.org/rfc/rfc5246#section-7.4.1.4.1 - get algorithm() { - switch (this.hashAlgorithm) { - /* istanbul ignore next */ - case 0: - return 'none'; - /* istanbul ignore next */ - case 1: - return 'md5'; - /* istanbul ignore next */ - case 2: - return 'sha1'; - /* istanbul ignore next */ - case 3: - return 'sha224'; - case 4: - return 'sha256'; - /* istanbul ignore next */ - case 5: - return 'sha384'; - /* istanbul ignore next */ - case 6: - return 'sha512'; - /* istanbul ignore next */ - default: - return 'unknown'; - } - } - verify(preCert, key) { - // Assemble the digitally-signed struct (the data over which the signature - // was generated). - // https://www.rfc-editor.org/rfc/rfc6962#section-3.2 - const stream = new stream_1.ByteStream(); - stream.appendChar(this.version); - stream.appendChar(0x00); // SignatureType = certificate_timestamp(0) - stream.appendView(this.timestamp); - stream.appendUint16(0x01); // LogEntryType = precert_entry(1) - stream.appendView(preCert); - stream.appendUint16(this.extensions.byteLength); - /* istanbul ignore next - extensions are very uncommon */ - if (this.extensions.byteLength > 0) { - stream.appendView(this.extensions); - } - return crypto.verify(stream.buffer, key, this.signature, this.algorithm); - } - // Parses a SignedCertificateTimestamp from a buffer. SCTs are encoded using - // TLS encoding which means the fields and lengths of most fields are - // specified as part of the SCT and TLS specs. - // https://www.rfc-editor.org/rfc/rfc6962#section-3.2 - // https://www.rfc-editor.org/rfc/rfc5246#section-7.4.1.4.1 - static parse(buf) { - const stream = new stream_1.ByteStream(buf); - // Version - enum { v1(0), (255) } - const version = stream.getUint8(); - // Log ID - struct { opaque key_id[32]; } - const logID = stream.getBlock(32); - // Timestamp - uint64 - const timestamp = stream.getBlock(8); - // Extensions - opaque extensions<0..2^16-1>; - const extenstionLength = stream.getUint16(); - const extensions = stream.getBlock(extenstionLength); - // Hash algo - enum { sha256(4), . . . 
(255) } - const hashAlgorithm = stream.getUint8(); - // Signature algo - enum { anonymous(0), rsa(1), dsa(2), ecdsa(3), (255) } - const signatureAlgorithm = stream.getUint8(); - // Signature - opaque signature<0..2^16-1>; - const sigLength = stream.getUint16(); - const signature = stream.getBlock(sigLength); - // Check that we read the entire buffer - if (stream.position !== buf.length) { - throw new Error('SCT buffer length mismatch'); - } - return new SignedCertificateTimestamp({ - version, - logID, - timestamp, - extensions, - hashAlgorithm, - signatureAlgorithm, - signature, - }); - } -} -exports.SignedCertificateTimestamp = SignedCertificateTimestamp; diff --git a/node_modules/@sigstore/core/package.json b/node_modules/@sigstore/core/package.json deleted file mode 100644 index 621ff1715bcd1..0000000000000 --- a/node_modules/@sigstore/core/package.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "name": "@sigstore/core", - "version": "1.1.0", - "description": "Base library for Sigstore", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "scripts": { - "clean": "shx rm -rf dist *.tsbuildinfo", - "build": "tsc --build", - "test": "jest" - }, - "files": [ - "dist" - ], - "author": "bdehamer@github.com", - "license": "Apache-2.0", - "repository": { - "type": "git", - "url": "git+https://github.com/sigstore/sigstore-js.git" - }, - "bugs": { - "url": "https://github.com/sigstore/sigstore-js/issues" - }, - "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/core#readme", - "publishConfig": { - "provenance": true - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } -} diff --git a/node_modules/@sigstore/sign/LICENSE b/node_modules/@sigstore/sign/LICENSE deleted file mode 100644 index e9e7c1679a09d..0000000000000 --- a/node_modules/@sigstore/sign/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). 
- - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!) The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright 2023 The Sigstore Authors
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
diff --git a/node_modules/@sigstore/sign/dist/bundler/base.js b/node_modules/@sigstore/sign/dist/bundler/base.js
deleted file mode 100644
index 61d5eba4568a3..0000000000000
--- a/node_modules/@sigstore/sign/dist/bundler/base.js
+++ /dev/null
@@ -1,50 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.BaseBundleBuilder = void 0;
-// BaseBundleBuilder is a base class for BundleBuilder implementations. It
-// provides the basic workflow for signing and witnessing an artifact.
-// Subclasses must implement the `package` method to assemble a valid bundle
-// with the generated signature and verification material.
-class BaseBundleBuilder {
-    constructor(options) {
-        this.signer = options.signer;
-        this.witnesses = options.witnesses;
-    }
-    // Executes the signing/witnessing process for the given artifact.
- async create(artifact) { - const signature = await this.prepare(artifact).then((blob) => this.signer.sign(blob)); - const bundle = await this.package(artifact, signature); - // Invoke all of the witnesses in parallel - const verificationMaterials = await Promise.all(this.witnesses.map((witness) => witness.testify(bundle.content, publicKey(signature.key)))); - // Collect the verification material from all of the witnesses - const tlogEntryList = []; - const timestampList = []; - verificationMaterials.forEach(({ tlogEntries, rfc3161Timestamps }) => { - tlogEntryList.push(...(tlogEntries ?? [])); - timestampList.push(...(rfc3161Timestamps ?? [])); - }); - // Merge the collected verification material into the bundle - bundle.verificationMaterial.tlogEntries = tlogEntryList; - bundle.verificationMaterial.timestampVerificationData = { - rfc3161Timestamps: timestampList, - }; - return bundle; - } - // Override this function to apply any pre-signing transformations to the - // artifact. The returned buffer will be signed by the signer. The default - // implementation simply returns the artifact data. - async prepare(artifact) { - return artifact.data; - } -} -exports.BaseBundleBuilder = BaseBundleBuilder; -// Extracts the public key from a KeyMaterial. Returns either the public key -// or the certificate, depending on the type of key material. -function publicKey(key) { - switch (key.$case) { - case 'publicKey': - return key.publicKey; - case 'x509Certificate': - return key.certificate; - } -} diff --git a/node_modules/@sigstore/sign/dist/bundler/bundle.js b/node_modules/@sigstore/sign/dist/bundler/bundle.js deleted file mode 100644 index 7c2ca9164f0df..0000000000000 --- a/node_modules/@sigstore/sign/dist/bundler/bundle.js +++ /dev/null @@ -1,71 +0,0 @@ -"use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.toDSSEBundle = exports.toMessageSignatureBundle = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -const sigstore = __importStar(require("@sigstore/bundle")); -const util_1 = require("../util"); -// Helper functions for assembling the parts of a Sigstore bundle -// Message signature bundle - $case: 'messageSignature' -function toMessageSignatureBundle(artifact, signature) { - const digest = util_1.crypto.hash(artifact.data); - return sigstore.toMessageSignatureBundle({ - digest, - signature: signature.signature, - certificate: signature.key.$case === 'x509Certificate' - ? util_1.pem.toDER(signature.key.certificate) - : undefined, - keyHint: signature.key.$case === 'publicKey' ? signature.key.hint : undefined, - }); -} -exports.toMessageSignatureBundle = toMessageSignatureBundle; -// DSSE envelope bundle - $case: 'dsseEnvelope' -function toDSSEBundle(artifact, signature, singleCertificate) { - return sigstore.toDSSEBundle({ - artifact: artifact.data, - artifactType: artifact.type, - signature: signature.signature, - certificate: signature.key.$case === 'x509Certificate' - ? util_1.pem.toDER(signature.key.certificate) - : undefined, - keyHint: signature.key.$case === 'publicKey' ? signature.key.hint : undefined, - singleCertificate, - }); -} -exports.toDSSEBundle = toDSSEBundle; diff --git a/node_modules/@sigstore/sign/dist/bundler/dsse.js b/node_modules/@sigstore/sign/dist/bundler/dsse.js deleted file mode 100644 index 621700df93842..0000000000000 --- a/node_modules/@sigstore/sign/dist/bundler/dsse.js +++ /dev/null @@ -1,46 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.DSSEBundleBuilder = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const util_1 = require("../util"); -const base_1 = require("./base"); -const bundle_1 = require("./bundle"); -// BundleBuilder implementation for DSSE wrapped attestations -class DSSEBundleBuilder extends base_1.BaseBundleBuilder { - constructor(options) { - super(options); - this.singleCertificate = options.singleCertificate ?? false; - } - // DSSE requires the artifact to be pre-encoded with the payload type - // before the signature is generated. - async prepare(artifact) { - const a = artifactDefaults(artifact); - return util_1.dsse.preAuthEncoding(a.type, a.data); - } - // Packages the artifact and signature into a DSSE bundle - async package(artifact, signature) { - return (0, bundle_1.toDSSEBundle)(artifactDefaults(artifact), signature, this.singleCertificate); - } -} -exports.DSSEBundleBuilder = DSSEBundleBuilder; -// Defaults the artifact type to an empty string if not provided -function artifactDefaults(artifact) { - return { - ...artifact, - type: artifact.type ?? 
'', - }; -} diff --git a/node_modules/@sigstore/sign/dist/bundler/index.js b/node_modules/@sigstore/sign/dist/bundler/index.js deleted file mode 100644 index d67c8c324a4f0..0000000000000 --- a/node_modules/@sigstore/sign/dist/bundler/index.js +++ /dev/null @@ -1,7 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.MessageSignatureBundleBuilder = exports.DSSEBundleBuilder = void 0; -var dsse_1 = require("./dsse"); -Object.defineProperty(exports, "DSSEBundleBuilder", { enumerable: true, get: function () { return dsse_1.DSSEBundleBuilder; } }); -var message_1 = require("./message"); -Object.defineProperty(exports, "MessageSignatureBundleBuilder", { enumerable: true, get: function () { return message_1.MessageSignatureBundleBuilder; } }); diff --git a/node_modules/@sigstore/sign/dist/bundler/message.js b/node_modules/@sigstore/sign/dist/bundler/message.js deleted file mode 100644 index e3991f42bab93..0000000000000 --- a/node_modules/@sigstore/sign/dist/bundler/message.js +++ /dev/null @@ -1,30 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.MessageSignatureBundleBuilder = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const base_1 = require("./base"); -const bundle_1 = require("./bundle"); -// BundleBuilder implementation for raw message signatures -class MessageSignatureBundleBuilder extends base_1.BaseBundleBuilder { - constructor(options) { - super(options); - } - async package(artifact, signature) { - return (0, bundle_1.toMessageSignatureBundle)(artifact, signature); - } -} -exports.MessageSignatureBundleBuilder = MessageSignatureBundleBuilder; diff --git a/node_modules/@sigstore/sign/dist/error.js b/node_modules/@sigstore/sign/dist/error.js deleted file mode 100644 index d57e4567fb89e..0000000000000 --- a/node_modules/@sigstore/sign/dist/error.js +++ /dev/null @@ -1,39 +0,0 @@ -"use strict"; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.internalError = exports.InternalError = void 0; -const error_1 = require("./external/error"); -class InternalError extends Error { - constructor({ code, message, cause, }) { - super(message); - this.name = this.constructor.name; - this.cause = cause; - this.code = code; - } -} -exports.InternalError = InternalError; -function internalError(err, code, message) { - if (err instanceof error_1.HTTPError) { - message += ` - ${err.message}`; - } - throw new InternalError({ - code: code, - message: message, - cause: err, - }); -} -exports.internalError = internalError; diff --git a/node_modules/@sigstore/sign/dist/external/error.js b/node_modules/@sigstore/sign/dist/external/error.js deleted file mode 100644 index a6a65adebb176..0000000000000 --- a/node_modules/@sigstore/sign/dist/external/error.js +++ /dev/null @@ -1,26 +0,0 @@ -"use strict"; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.HTTPError = void 0; -class HTTPError extends Error { - constructor({ status, message, location, }) { - super(`(${status}) ${message}`); - this.statusCode = status; - this.location = location; - } -} -exports.HTTPError = HTTPError; diff --git a/node_modules/@sigstore/sign/dist/external/fetch.js b/node_modules/@sigstore/sign/dist/external/fetch.js deleted file mode 100644 index b2d81bde7be16..0000000000000 --- a/node_modules/@sigstore/sign/dist/external/fetch.js +++ /dev/null @@ -1,99 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.fetchWithRetry = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -const http2_1 = require("http2"); -const make_fetch_happen_1 = __importDefault(require("make-fetch-happen")); -const proc_log_1 = require("proc-log"); -const promise_retry_1 = __importDefault(require("promise-retry")); -const util_1 = require("../util"); -const error_1 = require("./error"); -const { HTTP2_HEADER_LOCATION, HTTP2_HEADER_CONTENT_TYPE, HTTP2_HEADER_USER_AGENT, HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_TOO_MANY_REQUESTS, HTTP_STATUS_REQUEST_TIMEOUT, } = http2_1.constants; -async function fetchWithRetry(url, options) { - return (0, promise_retry_1.default)(async (retry, attemptNum) => { - const method = options.method || 'POST'; - const headers = { - [HTTP2_HEADER_USER_AGENT]: util_1.ua.getUserAgent(), - ...options.headers, - }; - const response = await (0, make_fetch_happen_1.default)(url, { - method, - headers, - body: options.body, - timeout: options.timeout, - retry: false, // We're handling retries ourselves - }).catch((reason) => { - proc_log_1.log.http('fetch', `${method} ${url} attempt ${attemptNum} failed with ${reason}`); - return retry(reason); - }); - if (response.ok) { - return response; - } - else { - const error = await errorFromResponse(response); - proc_log_1.log.http('fetch', `${method} ${url} attempt ${attemptNum} failed with ${response.status}`); - if (retryable(response.status)) { - return retry(error); - } - else { - throw error; - } - } - }, retryOpts(options.retry)); -} -exports.fetchWithRetry = fetchWithRetry; -// Translate a Response into an HTTPError instance. This will attempt to parse -// the response body for a message, but will default to the statusText if none -// is found. -const errorFromResponse = async (response) => { - let message = response.statusText; - const location = response.headers?.get(HTTP2_HEADER_LOCATION) || undefined; - const contentType = response.headers?.get(HTTP2_HEADER_CONTENT_TYPE); - // If response type is JSON, try to parse the body for a message - if (contentType?.includes('application/json')) { - try { - const body = await response.json(); - message = body.message || message; - } - catch (e) { - // ignore - } - } - return new error_1.HTTPError({ - status: response.status, - message: message, - location: location, - }); -}; -// Determine if a status code is retryable. This includes 5xx errors, 408, and -// 429. -const retryable = (status) => [HTTP_STATUS_REQUEST_TIMEOUT, HTTP_STATUS_TOO_MANY_REQUESTS].includes(status) || status >= HTTP_STATUS_INTERNAL_SERVER_ERROR; -// Normalize the retry options to the format expected by promise-retry -const retryOpts = (retry) => { - if (typeof retry === 'boolean') { - return { retries: retry ? 1 : 0 }; - } - else if (typeof retry === 'number') { - return { retries: retry }; - } - else { - return { retries: 0, ...retry }; - } -}; diff --git a/node_modules/@sigstore/sign/dist/external/fulcio.js b/node_modules/@sigstore/sign/dist/external/fulcio.js deleted file mode 100644 index de6a1ad9f9e79..0000000000000 --- a/node_modules/@sigstore/sign/dist/external/fulcio.js +++ /dev/null @@ -1,41 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Fulcio = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const fetch_1 = require("./fetch"); -/** - * Fulcio API client. - */ -class Fulcio { - constructor(options) { - this.options = options; - } - async createSigningCertificate(request) { - const { baseURL, retry, timeout } = this.options; - const url = `${baseURL}/api/v2/signingCert`; - const response = await (0, fetch_1.fetchWithRetry)(url, { - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify(request), - timeout, - retry, - }); - return response.json(); - } -} -exports.Fulcio = Fulcio; diff --git a/node_modules/@sigstore/sign/dist/external/rekor.js b/node_modules/@sigstore/sign/dist/external/rekor.js deleted file mode 100644 index bb59a126e032f..0000000000000 --- a/node_modules/@sigstore/sign/dist/external/rekor.js +++ /dev/null @@ -1,80 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Rekor = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const fetch_1 = require("./fetch"); -/** - * Rekor API client. - */ -class Rekor { - constructor(options) { - this.options = options; - } - /** - * Create a new entry in the Rekor log. - * @param propsedEntry {ProposedEntry} Data to create a new entry - * @returns {Promise} The created entry - */ - async createEntry(propsedEntry) { - const { baseURL, timeout, retry } = this.options; - const url = `${baseURL}/api/v1/log/entries`; - const response = await (0, fetch_1.fetchWithRetry)(url, { - headers: { - 'Content-Type': 'application/json', - Accept: 'application/json', - }, - body: JSON.stringify(propsedEntry), - timeout, - retry, - }); - const data = await response.json(); - return entryFromResponse(data); - } - /** - * Get an entry from the Rekor log. - * @param uuid {string} The UUID of the entry to retrieve - * @returns {Promise} The retrieved entry - */ - async getEntry(uuid) { - const { baseURL, timeout, retry } = this.options; - const url = `${baseURL}/api/v1/log/entries/${uuid}`; - const response = await (0, fetch_1.fetchWithRetry)(url, { - method: 'GET', - headers: { - Accept: 'application/json', - }, - timeout, - retry, - }); - const data = await response.json(); - return entryFromResponse(data); - } -} -exports.Rekor = Rekor; -// Unpack the response from the Rekor API into a more convenient format. 
-function entryFromResponse(data) { - const entries = Object.entries(data); - if (entries.length != 1) { - throw new Error('Received multiple entries in Rekor response'); - } - // Grab UUID and entry data from the response - const [uuid, entry] = entries[0]; - return { - ...entry, - uuid, - }; -} diff --git a/node_modules/@sigstore/sign/dist/external/tsa.js b/node_modules/@sigstore/sign/dist/external/tsa.js deleted file mode 100644 index a948ba9cca2c7..0000000000000 --- a/node_modules/@sigstore/sign/dist/external/tsa.js +++ /dev/null @@ -1,38 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.TimestampAuthority = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const fetch_1 = require("./fetch"); -class TimestampAuthority { - constructor(options) { - this.options = options; - } - async createTimestamp(request) { - const { baseURL, timeout, retry } = this.options; - const url = `${baseURL}/api/v1/timestamp`; - const response = await (0, fetch_1.fetchWithRetry)(url, { - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify(request), - timeout, - retry, - }); - return response.buffer(); - } -} -exports.TimestampAuthority = TimestampAuthority; diff --git a/node_modules/@sigstore/sign/dist/identity/ci.js b/node_modules/@sigstore/sign/dist/identity/ci.js deleted file mode 100644 index d79133952b605..0000000000000 --- a/node_modules/@sigstore/sign/dist/identity/ci.js +++ /dev/null @@ -1,73 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.CIContextProvider = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const make_fetch_happen_1 = __importDefault(require("make-fetch-happen")); -// Collection of all the CI-specific providers we have implemented -const providers = [getGHAToken, getEnv]; -/** - * CIContextProvider is a composite identity provider which will iterate - * over all of the CI-specific providers and return the token from the first - * one that resolves. - */ -class CIContextProvider { - /* istanbul ignore next */ - constructor(audience = 'sigstore') { - this.audience = audience; - } - // Invoke all registered ProviderFuncs and return the value of whichever one - // resolves first. 
- async getToken() { - return Promise.any(providers.map((getToken) => getToken(this.audience))).catch(() => Promise.reject('CI: no tokens available')); - } -} -exports.CIContextProvider = CIContextProvider; -/** - * getGHAToken can retrieve an OIDC token when running in a GitHub Actions - * workflow - */ -async function getGHAToken(audience) { - // Check to see if we're running in GitHub Actions - if (!process.env.ACTIONS_ID_TOKEN_REQUEST_URL || - !process.env.ACTIONS_ID_TOKEN_REQUEST_TOKEN) { - return Promise.reject('no token available'); - } - // Construct URL to request token w/ appropriate audience - const url = new URL(process.env.ACTIONS_ID_TOKEN_REQUEST_URL); - url.searchParams.append('audience', audience); - const response = await (0, make_fetch_happen_1.default)(url.href, { - retry: 2, - headers: { - Accept: 'application/json', - Authorization: `Bearer ${process.env.ACTIONS_ID_TOKEN_REQUEST_TOKEN}`, - }, - }); - return response.json().then((data) => data.value); -} -/** - * getEnv can retrieve an OIDC token from an environment variable. - * This matches the behavior of https://github.com/sigstore/cosign/tree/main/pkg/providers/envvar - */ -async function getEnv() { - if (!process.env.SIGSTORE_ID_TOKEN) { - return Promise.reject('no token available'); - } - return process.env.SIGSTORE_ID_TOKEN; -} diff --git a/node_modules/@sigstore/sign/dist/identity/index.js b/node_modules/@sigstore/sign/dist/identity/index.js deleted file mode 100644 index 1c1223b443fab..0000000000000 --- a/node_modules/@sigstore/sign/dist/identity/index.js +++ /dev/null @@ -1,20 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.CIContextProvider = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -var ci_1 = require("./ci"); -Object.defineProperty(exports, "CIContextProvider", { enumerable: true, get: function () { return ci_1.CIContextProvider; } }); diff --git a/node_modules/@sigstore/sign/dist/identity/provider.js b/node_modules/@sigstore/sign/dist/identity/provider.js deleted file mode 100644 index c8ad2e549bdc6..0000000000000 --- a/node_modules/@sigstore/sign/dist/identity/provider.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@sigstore/sign/dist/index.js b/node_modules/@sigstore/sign/dist/index.js deleted file mode 100644 index 383b76083361b..0000000000000 --- a/node_modules/@sigstore/sign/dist/index.js +++ /dev/null @@ -1,17 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.TSAWitness = exports.RekorWitness = exports.DEFAULT_REKOR_URL = exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = exports.CIContextProvider = exports.InternalError = exports.MessageSignatureBundleBuilder = exports.DSSEBundleBuilder = void 0; -var bundler_1 = require("./bundler"); -Object.defineProperty(exports, "DSSEBundleBuilder", { enumerable: true, get: function () { return bundler_1.DSSEBundleBuilder; } }); -Object.defineProperty(exports, "MessageSignatureBundleBuilder", { enumerable: true, get: function () { return bundler_1.MessageSignatureBundleBuilder; } }); -var error_1 = require("./error"); -Object.defineProperty(exports, "InternalError", { enumerable: true, get: function () { return error_1.InternalError; } }); -var identity_1 = require("./identity"); -Object.defineProperty(exports, "CIContextProvider", { enumerable: true, get: function () { return identity_1.CIContextProvider; } }); -var signer_1 = require("./signer"); -Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return signer_1.DEFAULT_FULCIO_URL; } }); -Object.defineProperty(exports, "FulcioSigner", { enumerable: true, get: function () { return signer_1.FulcioSigner; } }); -var witness_1 = require("./witness"); -Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return witness_1.DEFAULT_REKOR_URL; } }); -Object.defineProperty(exports, "RekorWitness", { enumerable: true, get: function () { return witness_1.RekorWitness; } }); -Object.defineProperty(exports, "TSAWitness", { enumerable: true, get: function () { return witness_1.TSAWitness; } }); diff --git a/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js b/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js deleted file mode 100644 index 81b421eabadb2..0000000000000 --- a/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js +++ /dev/null @@ -1,60 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.CAClient = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -const error_1 = require("../../error"); -const fulcio_1 = require("../../external/fulcio"); -class CAClient { - constructor(options) { - this.fulcio = new fulcio_1.Fulcio({ - baseURL: options.fulcioBaseURL, - retry: options.retry, - timeout: options.timeout, - }); - } - async createSigningCertificate(identityToken, publicKey, challenge) { - const request = toCertificateRequest(identityToken, publicKey, challenge); - try { - const resp = await this.fulcio.createSigningCertificate(request); - // Account for the fact that the response may contain either a - // signedCertificateEmbeddedSct or a signedCertificateDetachedSct. - const cert = resp.signedCertificateEmbeddedSct - ? resp.signedCertificateEmbeddedSct - : resp.signedCertificateDetachedSct; - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - return cert.chain.certificates; - } - catch (err) { - (0, error_1.internalError)(err, 'CA_CREATE_SIGNING_CERTIFICATE_ERROR', 'error creating signing certificate'); - } - } -} -exports.CAClient = CAClient; -function toCertificateRequest(identityToken, publicKey, challenge) { - return { - credentials: { - oidcIdentityToken: identityToken, - }, - publicKeyRequest: { - publicKey: { - algorithm: 'ECDSA', - content: publicKey, - }, - proofOfPossession: challenge.toString('base64'), - }, - }; -} diff --git a/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js b/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js deleted file mode 100644 index 481aa5c3579a2..0000000000000 --- a/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js +++ /dev/null @@ -1,45 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.EphemeralSigner = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const crypto_1 = __importDefault(require("crypto")); -const EC_KEYPAIR_TYPE = 'ec'; -const P256_CURVE = 'P-256'; -// Signer implementation which uses an ephemeral keypair to sign artifacts. -// The private key lives only in memory and is tied to the lifetime of the -// EphemeralSigner instance. 
-class EphemeralSigner { - constructor() { - this.keypair = crypto_1.default.generateKeyPairSync(EC_KEYPAIR_TYPE, { - namedCurve: P256_CURVE, - }); - } - async sign(data) { - const signature = crypto_1.default.sign(null, data, this.keypair.privateKey); - const publicKey = this.keypair.publicKey - .export({ format: 'pem', type: 'spki' }) - .toString('ascii'); - return { - signature: signature, - key: { $case: 'publicKey', publicKey }, - }; - } -} -exports.EphemeralSigner = EphemeralSigner; diff --git a/node_modules/@sigstore/sign/dist/signer/fulcio/index.js b/node_modules/@sigstore/sign/dist/signer/fulcio/index.js deleted file mode 100644 index 89a432548d2b4..0000000000000 --- a/node_modules/@sigstore/sign/dist/signer/fulcio/index.js +++ /dev/null @@ -1,87 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const error_1 = require("../../error"); -const util_1 = require("../../util"); -const ca_1 = require("./ca"); -const ephemeral_1 = require("./ephemeral"); -exports.DEFAULT_FULCIO_URL = 'https://fulcio.sigstore.dev'; -// Signer implementation which can be used to decorate another signer -// with a Fulcio-issued signing certificate for the signer's public key. -// Must be instantiated with an identity provider which can provide a JWT -// which represents the identity to be bound to the signing certificate. -class FulcioSigner { - constructor(options) { - this.ca = new ca_1.CAClient({ - ...options, - fulcioBaseURL: options.fulcioBaseURL || /* istanbul ignore next */ exports.DEFAULT_FULCIO_URL, - }); - this.identityProvider = options.identityProvider; - this.keyHolder = options.keyHolder || new ephemeral_1.EphemeralSigner(); - } - async sign(data) { - // Retrieve identity token from the supplied identity provider - const identityToken = await this.getIdentityToken(); - // Extract challenge claim from OIDC token - let subject; - try { - subject = util_1.oidc.extractJWTSubject(identityToken); - } - catch (err) { - throw new error_1.InternalError({ - code: 'IDENTITY_TOKEN_PARSE_ERROR', - message: `invalid identity token: ${identityToken}`, - cause: err, - }); - } - // Construct challenge value by signing the subject claim - const challenge = await this.keyHolder.sign(Buffer.from(subject)); - if (challenge.key.$case !== 'publicKey') { - throw new error_1.InternalError({ - code: 'CA_CREATE_SIGNING_CERTIFICATE_ERROR', - message: 'unexpected format for signing key', - }); - } - // Create signing certificate - const certificates = await this.ca.createSigningCertificate(identityToken, challenge.key.publicKey, challenge.signature); - // Generate artifact signature - const signature = await this.keyHolder.sign(data); - // Specifically returning only the first certificate in the chain - // as the key. 
- return { - signature: signature.signature, - key: { - $case: 'x509Certificate', - certificate: certificates[0], - }, - }; - } - async getIdentityToken() { - try { - return await this.identityProvider.getToken(); - } - catch (err) { - throw new error_1.InternalError({ - code: 'IDENTITY_TOKEN_READ_ERROR', - message: 'error retrieving identity token', - cause: err, - }); - } - } -} -exports.FulcioSigner = FulcioSigner; diff --git a/node_modules/@sigstore/sign/dist/signer/index.js b/node_modules/@sigstore/sign/dist/signer/index.js deleted file mode 100644 index e2087767b81c1..0000000000000 --- a/node_modules/@sigstore/sign/dist/signer/index.js +++ /dev/null @@ -1,22 +0,0 @@ -"use strict"; -/* istanbul ignore file */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -var fulcio_1 = require("./fulcio"); -Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return fulcio_1.DEFAULT_FULCIO_URL; } }); -Object.defineProperty(exports, "FulcioSigner", { enumerable: true, get: function () { return fulcio_1.FulcioSigner; } }); diff --git a/node_modules/@sigstore/sign/dist/signer/signer.js b/node_modules/@sigstore/sign/dist/signer/signer.js deleted file mode 100644 index b92c54183375d..0000000000000 --- a/node_modules/@sigstore/sign/dist/signer/signer.js +++ /dev/null @@ -1,17 +0,0 @@ -"use strict"; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@sigstore/sign/dist/types/fetch.js b/node_modules/@sigstore/sign/dist/types/fetch.js deleted file mode 100644 index c8ad2e549bdc6..0000000000000 --- a/node_modules/@sigstore/sign/dist/types/fetch.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@sigstore/sign/dist/util/index.js b/node_modules/@sigstore/sign/dist/util/index.js deleted file mode 100644 index f467c9150c348..0000000000000 --- a/node_modules/@sigstore/sign/dist/util/index.js +++ /dev/null @@ -1,49 +0,0 @@ -"use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.ua = exports.oidc = exports.pem = exports.json = exports.encoding = exports.dsse = exports.crypto = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -var core_1 = require("@sigstore/core"); -Object.defineProperty(exports, "crypto", { enumerable: true, get: function () { return core_1.crypto; } }); -Object.defineProperty(exports, "dsse", { enumerable: true, get: function () { return core_1.dsse; } }); -Object.defineProperty(exports, "encoding", { enumerable: true, get: function () { return core_1.encoding; } }); -Object.defineProperty(exports, "json", { enumerable: true, get: function () { return core_1.json; } }); -Object.defineProperty(exports, "pem", { enumerable: true, get: function () { return core_1.pem; } }); -exports.oidc = __importStar(require("./oidc")); -exports.ua = __importStar(require("./ua")); diff --git a/node_modules/@sigstore/sign/dist/util/oidc.js b/node_modules/@sigstore/sign/dist/util/oidc.js deleted file mode 100644 index 2f5947d7b6b87..0000000000000 --- a/node_modules/@sigstore/sign/dist/util/oidc.js +++ /dev/null @@ -1,31 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.extractJWTSubject = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -const core_1 = require("@sigstore/core"); -function extractJWTSubject(jwt) { - const parts = jwt.split('.', 3); - const payload = JSON.parse(core_1.encoding.base64Decode(parts[1])); - switch (payload.iss) { - case 'https://accounts.google.com': - case 'https://oauth2.sigstore.dev/auth': - return payload.email; - default: - return payload.sub; - } -} -exports.extractJWTSubject = extractJWTSubject; diff --git a/node_modules/@sigstore/sign/dist/util/ua.js b/node_modules/@sigstore/sign/dist/util/ua.js deleted file mode 100644 index c142330eb8338..0000000000000 --- a/node_modules/@sigstore/sign/dist/util/ua.js +++ /dev/null @@ -1,33 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.getUserAgent = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const os_1 = __importDefault(require("os")); -// Format User-Agent: / () -// source: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/User-Agent -const getUserAgent = () => { - // eslint-disable-next-line @typescript-eslint/no-var-requires - const packageVersion = require('../../package.json').version; - const nodeVersion = process.version; - const platformName = os_1.default.platform(); - const archName = os_1.default.arch(); - return `sigstore-js/${packageVersion} (Node ${nodeVersion}) (${platformName}/${archName})`; -}; -exports.getUserAgent = getUserAgent; diff --git a/node_modules/@sigstore/sign/dist/witness/index.js b/node_modules/@sigstore/sign/dist/witness/index.js deleted file mode 100644 index 72677c399caa7..0000000000000 --- a/node_modules/@sigstore/sign/dist/witness/index.js +++ /dev/null @@ -1,24 +0,0 @@ -"use strict"; -/* istanbul ignore file */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.TSAWitness = exports.RekorWitness = exports.DEFAULT_REKOR_URL = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -var tlog_1 = require("./tlog"); -Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return tlog_1.DEFAULT_REKOR_URL; } }); -Object.defineProperty(exports, "RekorWitness", { enumerable: true, get: function () { return tlog_1.RekorWitness; } }); -var tsa_1 = require("./tsa"); -Object.defineProperty(exports, "TSAWitness", { enumerable: true, get: function () { return tsa_1.TSAWitness; } }); diff --git a/node_modules/@sigstore/sign/dist/witness/tlog/client.js b/node_modules/@sigstore/sign/dist/witness/tlog/client.js deleted file mode 100644 index 22c895f2ca7ed..0000000000000 --- a/node_modules/@sigstore/sign/dist/witness/tlog/client.js +++ /dev/null @@ -1,61 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.TLogClient = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const error_1 = require("../../error"); -const error_2 = require("../../external/error"); -const rekor_1 = require("../../external/rekor"); -class TLogClient { - constructor(options) { - this.fetchOnConflict = options.fetchOnConflict ?? false; - this.rekor = new rekor_1.Rekor({ - baseURL: options.rekorBaseURL, - retry: options.retry, - timeout: options.timeout, - }); - } - async createEntry(proposedEntry) { - let entry; - try { - entry = await this.rekor.createEntry(proposedEntry); - } - catch (err) { - // If the entry already exists, fetch it (if enabled) - if (entryExistsError(err) && this.fetchOnConflict) { - // Grab the UUID of the existing entry from the location header - /* istanbul ignore next */ - const uuid = err.location.split('/').pop() || ''; - try { - entry = await this.rekor.getEntry(uuid); - } - catch (err) { - (0, error_1.internalError)(err, 'TLOG_FETCH_ENTRY_ERROR', 'error fetching tlog entry'); - } - } - else { - (0, error_1.internalError)(err, 'TLOG_CREATE_ENTRY_ERROR', 'error creating tlog entry'); - } - } - return entry; - } -} -exports.TLogClient = TLogClient; -function entryExistsError(value) { - return (value instanceof error_2.HTTPError && - value.statusCode === 409 && - value.location !== undefined); -} diff --git a/node_modules/@sigstore/sign/dist/witness/tlog/entry.js b/node_modules/@sigstore/sign/dist/witness/tlog/entry.js deleted file mode 100644 index c237523a2c9b2..0000000000000 --- a/node_modules/@sigstore/sign/dist/witness/tlog/entry.js +++ /dev/null @@ -1,136 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.toProposedEntry = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-See the License for the specific language governing permissions and -limitations under the License. -*/ -const bundle_1 = require("@sigstore/bundle"); -const util_1 = require("../../util"); -function toProposedEntry(content, publicKey, -// TODO: Remove this parameter once have completely switched to 'dsse' entries -entryType = 'intoto') { - switch (content.$case) { - case 'dsseEnvelope': - // TODO: Remove this conditional once have completely switched to 'dsse' entries - if (entryType === 'dsse') { - return toProposedDSSEEntry(content.dsseEnvelope, publicKey); - } - return toProposedIntotoEntry(content.dsseEnvelope, publicKey); - case 'messageSignature': - return toProposedHashedRekordEntry(content.messageSignature, publicKey); - } -} -exports.toProposedEntry = toProposedEntry; -// Returns a properly formatted Rekor "hashedrekord" entry for the given digest -// and signature -function toProposedHashedRekordEntry(messageSignature, publicKey) { - const hexDigest = messageSignature.messageDigest.digest.toString('hex'); - const b64Signature = messageSignature.signature.toString('base64'); - const b64Key = util_1.encoding.base64Encode(publicKey); - return { - apiVersion: '0.0.1', - kind: 'hashedrekord', - spec: { - data: { - hash: { - algorithm: 'sha256', - value: hexDigest, - }, - }, - signature: { - content: b64Signature, - publicKey: { - content: b64Key, - }, - }, - }, - }; -} -// Returns a properly formatted Rekor "dsse" entry for the given DSSE envelope -// and signature -function toProposedDSSEEntry(envelope, publicKey) { - const envelopeJSON = JSON.stringify((0, bundle_1.envelopeToJSON)(envelope)); - const encodedKey = util_1.encoding.base64Encode(publicKey); - return { - apiVersion: '0.0.1', - kind: 'dsse', - spec: { - proposedContent: { - envelope: envelopeJSON, - verifiers: [encodedKey], - }, - }, - }; -} -// Returns a properly formatted Rekor "intoto" entry for the given DSSE -// envelope and signature -function toProposedIntotoEntry(envelope, publicKey) { - // Calculate the value for the payloadHash field in the Rekor entry - const payloadHash = util_1.crypto.hash(envelope.payload).toString('hex'); - // Calculate the value for the hash field in the Rekor entry - const envelopeHash = calculateDSSEHash(envelope, publicKey); - // Collect values for re-creating the DSSE envelope. - // Double-encode payload and signature cause that's what Rekor expects - const payload = util_1.encoding.base64Encode(envelope.payload.toString('base64')); - const sig = util_1.encoding.base64Encode(envelope.signatures[0].sig.toString('base64')); - const keyid = envelope.signatures[0].keyid; - const encodedKey = util_1.encoding.base64Encode(publicKey); - // Create the envelope portion of the entry. Note the inclusion of the - // publicKey in the signature struct is not a standard part of a DSSE - // envelope, but is required by Rekor. - const dsse = { - payloadType: envelope.payloadType, - payload: payload, - signatures: [{ sig, publicKey: encodedKey }], - }; - // If the keyid is an empty string, Rekor seems to remove it altogether. We - // need to do the same here so that we can properly recreate the entry for - // verification. - if (keyid.length > 0) { - dsse.signatures[0].keyid = keyid; - } - return { - apiVersion: '0.0.2', - kind: 'intoto', - spec: { - content: { - envelope: dsse, - hash: { algorithm: 'sha256', value: envelopeHash }, - payloadHash: { algorithm: 'sha256', value: payloadHash }, - }, - }, - }; -} -// Calculates the hash of a DSSE envelope for inclusion in a Rekor entry. 
-// There is no standard way to do this, so the scheme we're using as as -// follows: -// * payload is base64 encoded -// * signature is base64 encoded (only the first signature is used) -// * keyid is included ONLY if it is NOT an empty string -// * The resulting JSON is canonicalized and hashed to a hex string -function calculateDSSEHash(envelope, publicKey) { - const dsse = { - payloadType: envelope.payloadType, - payload: envelope.payload.toString('base64'), - signatures: [ - { sig: envelope.signatures[0].sig.toString('base64'), publicKey }, - ], - }; - // If the keyid is an empty string, Rekor seems to remove it altogether. - if (envelope.signatures[0].keyid.length > 0) { - dsse.signatures[0].keyid = envelope.signatures[0].keyid; - } - return util_1.crypto.hash(util_1.json.canonicalize(dsse)).toString('hex'); -} diff --git a/node_modules/@sigstore/sign/dist/witness/tlog/index.js b/node_modules/@sigstore/sign/dist/witness/tlog/index.js deleted file mode 100644 index 6197b09d4cdd9..0000000000000 --- a/node_modules/@sigstore/sign/dist/witness/tlog/index.js +++ /dev/null @@ -1,82 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.RekorWitness = exports.DEFAULT_REKOR_URL = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const util_1 = require("../../util"); -const client_1 = require("./client"); -const entry_1 = require("./entry"); -exports.DEFAULT_REKOR_URL = 'https://rekor.sigstore.dev'; -class RekorWitness { - constructor(options) { - this.entryType = options.entryType; - this.tlog = new client_1.TLogClient({ - ...options, - rekorBaseURL: options.rekorBaseURL || /* istanbul ignore next */ exports.DEFAULT_REKOR_URL, - }); - } - async testify(content, publicKey) { - const proposedEntry = (0, entry_1.toProposedEntry)(content, publicKey, this.entryType); - const entry = await this.tlog.createEntry(proposedEntry); - return toTransparencyLogEntry(entry); - } -} -exports.RekorWitness = RekorWitness; -function toTransparencyLogEntry(entry) { - const logID = Buffer.from(entry.logID, 'hex'); - // Parse entry body so we can extract the kind and version. - const bodyJSON = util_1.encoding.base64Decode(entry.body); - const entryBody = JSON.parse(bodyJSON); - const promise = entry?.verification?.signedEntryTimestamp - ? inclusionPromise(entry.verification.signedEntryTimestamp) - : undefined; - const proof = entry?.verification?.inclusionProof - ? 
inclusionProof(entry.verification.inclusionProof) - : undefined; - const tlogEntry = { - logIndex: entry.logIndex.toString(), - logId: { - keyId: logID, - }, - integratedTime: entry.integratedTime.toString(), - kindVersion: { - kind: entryBody.kind, - version: entryBody.apiVersion, - }, - inclusionPromise: promise, - inclusionProof: proof, - canonicalizedBody: Buffer.from(entry.body, 'base64'), - }; - return { - tlogEntries: [tlogEntry], - }; -} -function inclusionPromise(promise) { - return { - signedEntryTimestamp: Buffer.from(promise, 'base64'), - }; -} -function inclusionProof(proof) { - return { - logIndex: proof.logIndex.toString(), - treeSize: proof.treeSize.toString(), - rootHash: Buffer.from(proof.rootHash, 'hex'), - hashes: proof.hashes.map((h) => Buffer.from(h, 'hex')), - checkpoint: { - envelope: proof.checkpoint, - }, - }; -} diff --git a/node_modules/@sigstore/sign/dist/witness/tsa/client.js b/node_modules/@sigstore/sign/dist/witness/tsa/client.js deleted file mode 100644 index a334deb00b775..0000000000000 --- a/node_modules/@sigstore/sign/dist/witness/tsa/client.js +++ /dev/null @@ -1,43 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.TSAClient = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const error_1 = require("../../error"); -const tsa_1 = require("../../external/tsa"); -const util_1 = require("../../util"); -class TSAClient { - constructor(options) { - this.tsa = new tsa_1.TimestampAuthority({ - baseURL: options.tsaBaseURL, - retry: options.retry, - timeout: options.timeout, - }); - } - async createTimestamp(signature) { - const request = { - artifactHash: util_1.crypto.hash(signature).toString('base64'), - hashAlgorithm: 'sha256', - }; - try { - return await this.tsa.createTimestamp(request); - } - catch (err) { - (0, error_1.internalError)(err, 'TSA_CREATE_TIMESTAMP_ERROR', 'error creating timestamp'); - } - } -} -exports.TSAClient = TSAClient; diff --git a/node_modules/@sigstore/sign/dist/witness/tsa/index.js b/node_modules/@sigstore/sign/dist/witness/tsa/index.js deleted file mode 100644 index d4f5c7c859d10..0000000000000 --- a/node_modules/@sigstore/sign/dist/witness/tsa/index.js +++ /dev/null @@ -1,44 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.TSAWitness = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -const client_1 = require("./client"); -class TSAWitness { - constructor(options) { - this.tsa = new client_1.TSAClient({ - tsaBaseURL: options.tsaBaseURL, - retry: options.retry, - timeout: options.timeout, - }); - } - async testify(content) { - const signature = extractSignature(content); - const timestamp = await this.tsa.createTimestamp(signature); - return { - rfc3161Timestamps: [{ signedTimestamp: timestamp }], - }; - } -} -exports.TSAWitness = TSAWitness; -function extractSignature(content) { - switch (content.$case) { - case 'dsseEnvelope': - return content.dsseEnvelope.signatures[0].sig; - case 'messageSignature': - return content.messageSignature.signature; - } -} diff --git a/node_modules/@sigstore/sign/dist/witness/witness.js b/node_modules/@sigstore/sign/dist/witness/witness.js deleted file mode 100644 index c8ad2e549bdc6..0000000000000 --- a/node_modules/@sigstore/sign/dist/witness/witness.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/agents.js b/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/agents.js deleted file mode 100644 index c541b93001517..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/agents.js +++ /dev/null @@ -1,206 +0,0 @@ -'use strict' - -const net = require('net') -const tls = require('tls') -const { once } = require('events') -const timers = require('timers/promises') -const { normalizeOptions, cacheOptions } = require('./options') -const { getProxy, getProxyAgent, proxyCache } = require('./proxy.js') -const Errors = require('./errors.js') -const { Agent: AgentBase } = require('agent-base') - -module.exports = class Agent extends AgentBase { - #options - #timeouts - #proxy - #noProxy - #ProxyAgent - - constructor (options = {}) { - const { timeouts, proxy, noProxy, ...normalizedOptions } = normalizeOptions(options) - - super(normalizedOptions) - - this.#options = normalizedOptions - this.#timeouts = timeouts - - if (proxy) { - this.#proxy = new URL(proxy) - this.#noProxy = noProxy - this.#ProxyAgent = getProxyAgent(proxy) - } - } - - get proxy () { - return this.#proxy ? { url: this.#proxy } : {} - } - - #getProxy (options) { - if (!this.#proxy) { - return - } - - const proxy = getProxy(`${options.protocol}//${options.host}:${options.port}`, { - proxy: this.#proxy, - noProxy: this.#noProxy, - }) - - if (!proxy) { - return - } - - const cacheKey = cacheOptions({ - ...options, - ...this.#options, - timeouts: this.#timeouts, - proxy, - }) - - if (proxyCache.has(cacheKey)) { - return proxyCache.get(cacheKey) - } - - let ProxyAgent = this.#ProxyAgent - if (Array.isArray(ProxyAgent)) { - ProxyAgent = this.isSecureEndpoint(options) ? ProxyAgent[1] : ProxyAgent[0] - } - - const proxyAgent = new ProxyAgent(proxy, { - ...this.#options, - socketOptions: { family: this.#options.family }, - }) - proxyCache.set(cacheKey, proxyAgent) - - return proxyAgent - } - - // takes an array of promises and races them against the connection timeout - // which will throw the necessary error if it is hit. This will return the - // result of the promise race. 
- async #timeoutConnection ({ promises, options, timeout }, ac = new AbortController()) { - if (timeout) { - const connectionTimeout = timers.setTimeout(timeout, null, { signal: ac.signal }) - .then(() => { - throw new Errors.ConnectionTimeoutError(`${options.host}:${options.port}`) - }).catch((err) => { - if (err.name === 'AbortError') { - return - } - throw err - }) - promises.push(connectionTimeout) - } - - let result - try { - result = await Promise.race(promises) - ac.abort() - } catch (err) { - ac.abort() - throw err - } - return result - } - - async connect (request, options) { - // if the connection does not have its own lookup function - // set, then use the one from our options - options.lookup ??= this.#options.lookup - - let socket - let timeout = this.#timeouts.connection - const isSecureEndpoint = this.isSecureEndpoint(options) - - const proxy = this.#getProxy(options) - if (proxy) { - // some of the proxies will wait for the socket to fully connect before - // returning so we have to await this while also racing it against the - // connection timeout. - const start = Date.now() - socket = await this.#timeoutConnection({ - options, - timeout, - promises: [proxy.connect(request, options)], - }) - // see how much time proxy.connect took and subtract it from - // the timeout - if (timeout) { - timeout = timeout - (Date.now() - start) - } - } else { - socket = (isSecureEndpoint ? tls : net).connect(options) - } - - socket.setKeepAlive(this.keepAlive, this.keepAliveMsecs) - socket.setNoDelay(this.keepAlive) - - const abortController = new AbortController() - const { signal } = abortController - - const connectPromise = socket[isSecureEndpoint ? 'secureConnecting' : 'connecting'] - ? once(socket, isSecureEndpoint ? 'secureConnect' : 'connect', { signal }) - : Promise.resolve() - - await this.#timeoutConnection({ - options, - timeout, - promises: [ - connectPromise, - once(socket, 'error', { signal }).then((err) => { - throw err[0] - }), - ], - }, abortController) - - if (this.#timeouts.idle) { - socket.setTimeout(this.#timeouts.idle, () => { - socket.destroy(new Errors.IdleTimeoutError(`${options.host}:${options.port}`)) - }) - } - - return socket - } - - addRequest (request, options) { - const proxy = this.#getProxy(options) - // it would be better to call proxy.addRequest here but this causes the - // http-proxy-agent to call its super.addRequest which causes the request - // to be added to the agent twice. since we only support 3 agents - // currently (see the required agents in proxy.js) we have manually - // checked that the only public methods we need to call are called in the - // next block. this could change in the future and presumably we would get - // failing tests until we have properly called the necessary methods on - // each of our proxy agents - if (proxy?.setRequestProps) { - proxy.setRequestProps(request, options) - } - - request.setHeader('connection', this.keepAlive ? 
'keep-alive' : 'close') - - if (this.#timeouts.response) { - let responseTimeout - request.once('finish', () => { - setTimeout(() => { - request.destroy(new Errors.ResponseTimeoutError(request, this.#proxy)) - }, this.#timeouts.response) - }) - request.once('response', () => { - clearTimeout(responseTimeout) - }) - } - - if (this.#timeouts.transfer) { - let transferTimeout - request.once('response', (res) => { - setTimeout(() => { - res.destroy(new Errors.TransferTimeoutError(request, this.#proxy)) - }, this.#timeouts.transfer) - res.once('close', () => { - clearTimeout(transferTimeout) - }) - }) - } - - return super.addRequest(request, options) - } -} diff --git a/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/dns.js b/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/dns.js deleted file mode 100644 index 3c6946c566d73..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/dns.js +++ /dev/null @@ -1,53 +0,0 @@ -'use strict' - -const { LRUCache } = require('lru-cache') -const dns = require('dns') - -// this is a factory so that each request can have its own opts (i.e. ttl) -// while still sharing the cache across all requests -const cache = new LRUCache({ max: 50 }) - -const getOptions = ({ - family = 0, - hints = dns.ADDRCONFIG, - all = false, - verbatim = undefined, - ttl = 5 * 60 * 1000, - lookup = dns.lookup, -}) => ({ - // hints and lookup are returned since both are top level properties to (net|tls).connect - hints, - lookup: (hostname, ...args) => { - const callback = args.pop() // callback is always last arg - const lookupOptions = args[0] ?? {} - - const options = { - family, - hints, - all, - verbatim, - ...(typeof lookupOptions === 'number' ? { family: lookupOptions } : lookupOptions), - } - - const key = JSON.stringify({ hostname, ...options }) - - if (cache.has(key)) { - const cached = cache.get(key) - return process.nextTick(callback, null, ...cached) - } - - lookup(hostname, options, (err, ...result) => { - if (err) { - return callback(err) - } - - cache.set(key, result, { ttl }) - return callback(null, ...result) - }) - }, -}) - -module.exports = { - cache, - getOptions, -} diff --git a/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/errors.js b/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/errors.js deleted file mode 100644 index 70475aec8eb35..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/errors.js +++ /dev/null @@ -1,61 +0,0 @@ -'use strict' - -class InvalidProxyProtocolError extends Error { - constructor (url) { - super(`Invalid protocol \`${url.protocol}\` connecting to proxy \`${url.host}\``) - this.code = 'EINVALIDPROXY' - this.proxy = url - } -} - -class ConnectionTimeoutError extends Error { - constructor (host) { - super(`Timeout connecting to host \`${host}\``) - this.code = 'ECONNECTIONTIMEOUT' - this.host = host - } -} - -class IdleTimeoutError extends Error { - constructor (host) { - super(`Idle timeout reached for host \`${host}\``) - this.code = 'EIDLETIMEOUT' - this.host = host - } -} - -class ResponseTimeoutError extends Error { - constructor (request, proxy) { - let msg = 'Response timeout ' - if (proxy) { - msg += `from proxy \`${proxy.host}\` ` - } - msg += `connecting to host \`${request.host}\`` - super(msg) - this.code = 'ERESPONSETIMEOUT' - this.proxy = proxy - this.request = request - } -} - -class TransferTimeoutError extends Error { - constructor (request, proxy) { - let msg = 'Transfer timeout ' - if (proxy) { - msg += `from proxy 
\`${proxy.host}\` ` - } - msg += `for \`${request.host}\`` - super(msg) - this.code = 'ETRANSFERTIMEOUT' - this.proxy = proxy - this.request = request - } -} - -module.exports = { - InvalidProxyProtocolError, - ConnectionTimeoutError, - IdleTimeoutError, - ResponseTimeoutError, - TransferTimeoutError, -} diff --git a/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/index.js b/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/index.js deleted file mode 100644 index b33d6eaef07a2..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/index.js +++ /dev/null @@ -1,56 +0,0 @@ -'use strict' - -const { LRUCache } = require('lru-cache') -const { normalizeOptions, cacheOptions } = require('./options') -const { getProxy, proxyCache } = require('./proxy.js') -const dns = require('./dns.js') -const Agent = require('./agents.js') - -const agentCache = new LRUCache({ max: 20 }) - -const getAgent = (url, { agent, proxy, noProxy, ...options } = {}) => { - // false has meaning so this can't be a simple truthiness check - if (agent != null) { - return agent - } - - url = new URL(url) - - const proxyForUrl = getProxy(url, { proxy, noProxy }) - const normalizedOptions = { - ...normalizeOptions(options), - proxy: proxyForUrl, - } - - const cacheKey = cacheOptions({ - ...normalizedOptions, - secureEndpoint: url.protocol === 'https:', - }) - - if (agentCache.has(cacheKey)) { - return agentCache.get(cacheKey) - } - - const newAgent = new Agent(normalizedOptions) - agentCache.set(cacheKey, newAgent) - - return newAgent -} - -module.exports = { - getAgent, - Agent, - // these are exported for backwards compatability - HttpAgent: Agent, - HttpsAgent: Agent, - cache: { - proxy: proxyCache, - agent: agentCache, - dns: dns.cache, - clear: () => { - proxyCache.clear() - agentCache.clear() - dns.cache.clear() - }, - }, -} diff --git a/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/options.js b/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/options.js deleted file mode 100644 index 0bf53f725f084..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/options.js +++ /dev/null @@ -1,86 +0,0 @@ -'use strict' - -const dns = require('./dns') - -const normalizeOptions = (opts) => { - const family = parseInt(opts.family ?? '0', 10) - const keepAlive = opts.keepAlive ?? true - - const normalized = { - // nodejs http agent options. these are all the defaults - // but kept here to increase the likelihood of cache hits - // https://nodejs.org/api/http.html#new-agentoptions - keepAliveMsecs: keepAlive ? 1000 : undefined, - maxSockets: opts.maxSockets ?? 15, - maxTotalSockets: Infinity, - maxFreeSockets: keepAlive ? 256 : undefined, - scheduling: 'fifo', - // then spread the rest of the options - ...opts, - // we already set these to their defaults that we want - family, - keepAlive, - // our custom timeout options - timeouts: { - // the standard timeout option is mapped to our idle timeout - // and then deleted below - idle: opts.timeout ?? 
0, - connection: 0, - response: 0, - transfer: 0, - ...opts.timeouts, - }, - // get the dns options that go at the top level of socket connection - ...dns.getOptions({ family, ...opts.dns }), - } - - // remove timeout since we already used it to set our own idle timeout - delete normalized.timeout - - return normalized -} - -const createKey = (obj) => { - let key = '' - const sorted = Object.entries(obj).sort((a, b) => a[0] - b[0]) - for (let [k, v] of sorted) { - if (v == null) { - v = 'null' - } else if (v instanceof URL) { - v = v.toString() - } else if (typeof v === 'object') { - v = createKey(v) - } - key += `${k}:${v}:` - } - return key -} - -const cacheOptions = ({ secureEndpoint, ...options }) => createKey({ - secureEndpoint: !!secureEndpoint, - // socket connect options - family: options.family, - hints: options.hints, - localAddress: options.localAddress, - // tls specific connect options - strictSsl: secureEndpoint ? !!options.rejectUnauthorized : false, - ca: secureEndpoint ? options.ca : null, - cert: secureEndpoint ? options.cert : null, - key: secureEndpoint ? options.key : null, - // http agent options - keepAlive: options.keepAlive, - keepAliveMsecs: options.keepAliveMsecs, - maxSockets: options.maxSockets, - maxTotalSockets: options.maxTotalSockets, - maxFreeSockets: options.maxFreeSockets, - scheduling: options.scheduling, - // timeout options - timeouts: options.timeouts, - // proxy - proxy: options.proxy, -}) - -module.exports = { - normalizeOptions, - cacheOptions, -} diff --git a/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/proxy.js b/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/proxy.js deleted file mode 100644 index 6272e929e57bc..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/proxy.js +++ /dev/null @@ -1,88 +0,0 @@ -'use strict' - -const { HttpProxyAgent } = require('http-proxy-agent') -const { HttpsProxyAgent } = require('https-proxy-agent') -const { SocksProxyAgent } = require('socks-proxy-agent') -const { LRUCache } = require('lru-cache') -const { InvalidProxyProtocolError } = require('./errors.js') - -const PROXY_CACHE = new LRUCache({ max: 20 }) - -const SOCKS_PROTOCOLS = new Set(SocksProxyAgent.protocols) - -const PROXY_ENV_KEYS = new Set(['https_proxy', 'http_proxy', 'proxy', 'no_proxy']) - -const PROXY_ENV = Object.entries(process.env).reduce((acc, [key, value]) => { - key = key.toLowerCase() - if (PROXY_ENV_KEYS.has(key)) { - acc[key] = value - } - return acc -}, {}) - -const getProxyAgent = (url) => { - url = new URL(url) - - const protocol = url.protocol.slice(0, -1) - if (SOCKS_PROTOCOLS.has(protocol)) { - return SocksProxyAgent - } - if (protocol === 'https' || protocol === 'http') { - return [HttpProxyAgent, HttpsProxyAgent] - } - - throw new InvalidProxyProtocolError(url) -} - -const isNoProxy = (url, noProxy) => { - if (typeof noProxy === 'string') { - noProxy = noProxy.split(',').map((p) => p.trim()).filter(Boolean) - } - - if (!noProxy || !noProxy.length) { - return false - } - - const hostSegments = url.hostname.split('.').reverse() - - return noProxy.some((no) => { - const noSegments = no.split('.').filter(Boolean).reverse() - if (!noSegments.length) { - return false - } - - for (let i = 0; i < noSegments.length; i++) { - if (hostSegments[i] !== noSegments[i]) { - return false - } - } - - return true - }) -} - -const getProxy = (url, { proxy, noProxy }) => { - url = new URL(url) - - if (!proxy) { - proxy = url.protocol === 'https:' - ? 
PROXY_ENV.https_proxy - : PROXY_ENV.https_proxy || PROXY_ENV.http_proxy || PROXY_ENV.proxy - } - - if (!noProxy) { - noProxy = PROXY_ENV.no_proxy - } - - if (!proxy || isNoProxy(url, noProxy)) { - return null - } - - return new URL(proxy) -} - -module.exports = { - getProxyAgent, - getProxy, - proxyCache: PROXY_CACHE, -} diff --git a/node_modules/@sigstore/sign/node_modules/@npmcli/agent/package.json b/node_modules/@sigstore/sign/node_modules/@npmcli/agent/package.json deleted file mode 100644 index ef5b4e3228cc4..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/@npmcli/agent/package.json +++ /dev/null @@ -1,60 +0,0 @@ -{ - "name": "@npmcli/agent", - "version": "2.2.2", - "description": "the http/https agent used by the npm cli", - "main": "lib/index.js", - "scripts": { - "gencerts": "bash scripts/create-cert.sh", - "test": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "snap": "tap", - "posttest": "npm run lint" - }, - "author": "GitHub Inc.", - "license": "ISC", - "bugs": { - "url": "https://github.com/npm/agent/issues" - }, - "homepage": "https://github.com/npm/agent#readme", - "files": [ - "bin/", - "lib/" - ], - "engines": { - "node": "^16.14.0 || >=18.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.21.3", - "publish": "true" - }, - "dependencies": { - "agent-base": "^7.1.0", - "http-proxy-agent": "^7.0.0", - "https-proxy-agent": "^7.0.1", - "lru-cache": "^10.0.1", - "socks-proxy-agent": "^8.0.3" - }, - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.21.3", - "minipass-fetch": "^3.0.3", - "nock": "^13.2.7", - "semver": "^7.5.4", - "simple-socks": "^3.1.0", - "tap": "^16.3.0" - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/agent.git" - }, - "tap": { - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - } -} diff --git a/node_modules/@sigstore/sign/node_modules/@npmcli/fs/LICENSE.md b/node_modules/@sigstore/sign/node_modules/@npmcli/fs/LICENSE.md deleted file mode 100644 index 5fc208ff122e0..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/@npmcli/fs/LICENSE.md +++ /dev/null @@ -1,20 +0,0 @@ - - -ISC License - -Copyright npm, Inc. - -Permission to use, copy, modify, and/or distribute this -software for any purpose with or without fee is hereby -granted, provided that the above copyright notice and this -permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL -WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO -EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, -WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE -USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/common/get-options.js b/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/common/get-options.js deleted file mode 100644 index cb5982f79077a..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/common/get-options.js +++ /dev/null @@ -1,20 +0,0 @@ -// given an input that may or may not be an object, return an object that has -// a copy of every defined property listed in 'copy'. if the input is not an -// object, assign it to the property named by 'wrap' -const getOptions = (input, { copy, wrap }) => { - const result = {} - - if (input && typeof input === 'object') { - for (const prop of copy) { - if (input[prop] !== undefined) { - result[prop] = input[prop] - } - } - } else { - result[wrap] = input - } - - return result -} - -module.exports = getOptions diff --git a/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/common/node.js b/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/common/node.js deleted file mode 100644 index 4d13bc037359d..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/common/node.js +++ /dev/null @@ -1,9 +0,0 @@ -const semver = require('semver') - -const satisfies = (range) => { - return semver.satisfies(process.version, range, { includePrerelease: true }) -} - -module.exports = { - satisfies, -} diff --git a/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/LICENSE b/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/LICENSE deleted file mode 100644 index 93546dfb7655b..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -(The MIT License) - -Copyright (c) 2011-2017 JP Richardson - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files -(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, - merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS -OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, - ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/errors.js b/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/errors.js deleted file mode 100644 index 1cd1e05d0c533..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/errors.js +++ /dev/null @@ -1,129 +0,0 @@ -'use strict' -const { inspect } = require('util') - -// adapted from node's internal/errors -// https://github.com/nodejs/node/blob/c8a04049/lib/internal/errors.js - -// close copy of node's internal SystemError class. 
-class SystemError { - constructor (code, prefix, context) { - // XXX context.code is undefined in all constructors used in cp/polyfill - // that may be a bug copied from node, maybe the constructor should use - // `code` not `errno`? nodejs/node#41104 - let message = `${prefix}: ${context.syscall} returned ` + - `${context.code} (${context.message})` - - if (context.path !== undefined) { - message += ` ${context.path}` - } - if (context.dest !== undefined) { - message += ` => ${context.dest}` - } - - this.code = code - Object.defineProperties(this, { - name: { - value: 'SystemError', - enumerable: false, - writable: true, - configurable: true, - }, - message: { - value: message, - enumerable: false, - writable: true, - configurable: true, - }, - info: { - value: context, - enumerable: true, - configurable: true, - writable: false, - }, - errno: { - get () { - return context.errno - }, - set (value) { - context.errno = value - }, - enumerable: true, - configurable: true, - }, - syscall: { - get () { - return context.syscall - }, - set (value) { - context.syscall = value - }, - enumerable: true, - configurable: true, - }, - }) - - if (context.path !== undefined) { - Object.defineProperty(this, 'path', { - get () { - return context.path - }, - set (value) { - context.path = value - }, - enumerable: true, - configurable: true, - }) - } - - if (context.dest !== undefined) { - Object.defineProperty(this, 'dest', { - get () { - return context.dest - }, - set (value) { - context.dest = value - }, - enumerable: true, - configurable: true, - }) - } - } - - toString () { - return `${this.name} [${this.code}]: ${this.message}` - } - - [Symbol.for('nodejs.util.inspect.custom')] (_recurseTimes, ctx) { - return inspect(this, { - ...ctx, - getters: true, - customInspect: false, - }) - } -} - -function E (code, message) { - module.exports[code] = class NodeError extends SystemError { - constructor (ctx) { - super(code, message, ctx) - } - } -} - -E('ERR_FS_CP_DIR_TO_NON_DIR', 'Cannot overwrite directory with non-directory') -E('ERR_FS_CP_EEXIST', 'Target already exists') -E('ERR_FS_CP_EINVAL', 'Invalid src or dest') -E('ERR_FS_CP_FIFO_PIPE', 'Cannot copy a FIFO pipe') -E('ERR_FS_CP_NON_DIR_TO_DIR', 'Cannot overwrite non-directory with directory') -E('ERR_FS_CP_SOCKET', 'Cannot copy a socket file') -E('ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY', 'Cannot overwrite symlink in subdirectory of self') -E('ERR_FS_CP_UNKNOWN', 'Cannot copy an unknown file type') -E('ERR_FS_EISDIR', 'Path is a directory') - -module.exports.ERR_INVALID_ARG_TYPE = class ERR_INVALID_ARG_TYPE extends Error { - constructor (name, expected, actual) { - super() - this.code = 'ERR_INVALID_ARG_TYPE' - this.message = `The ${name} argument must be ${expected}. 
Received ${typeof actual}` - } -} diff --git a/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/index.js b/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/index.js deleted file mode 100644 index 972ce7aa12abe..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/index.js +++ /dev/null @@ -1,22 +0,0 @@ -const fs = require('fs/promises') -const getOptions = require('../common/get-options.js') -const node = require('../common/node.js') -const polyfill = require('./polyfill.js') - -// node 16.7.0 added fs.cp -const useNative = node.satisfies('>=16.7.0') - -const cp = async (src, dest, opts) => { - const options = getOptions(opts, { - copy: ['dereference', 'errorOnExist', 'filter', 'force', 'preserveTimestamps', 'recursive'], - }) - - // the polyfill is tested separately from this module, no need to hack - // process.version to try to trigger it just for coverage - // istanbul ignore next - return useNative - ? fs.cp(src, dest, options) - : polyfill(src, dest, options) -} - -module.exports = cp diff --git a/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/polyfill.js b/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/polyfill.js deleted file mode 100644 index 80eb10de97191..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/polyfill.js +++ /dev/null @@ -1,428 +0,0 @@ -// this file is a modified version of the code in node 17.2.0 -// which is, in turn, a modified version of the fs-extra module on npm -// node core changes: -// - Use of the assert module has been replaced with core's error system. -// - All code related to the glob dependency has been removed. -// - Bring your own custom fs module is not currently supported. -// - Some basic code cleanup. -// changes here: -// - remove all callback related code -// - drop sync support -// - change assertions back to non-internal methods (see options.js) -// - throws ENOTDIR when rmdir gets an ENOENT for a path that exists in Windows -'use strict' - -const { - ERR_FS_CP_DIR_TO_NON_DIR, - ERR_FS_CP_EEXIST, - ERR_FS_CP_EINVAL, - ERR_FS_CP_FIFO_PIPE, - ERR_FS_CP_NON_DIR_TO_DIR, - ERR_FS_CP_SOCKET, - ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY, - ERR_FS_CP_UNKNOWN, - ERR_FS_EISDIR, - ERR_INVALID_ARG_TYPE, -} = require('./errors.js') -const { - constants: { - errno: { - EEXIST, - EISDIR, - EINVAL, - ENOTDIR, - }, - }, -} = require('os') -const { - chmod, - copyFile, - lstat, - mkdir, - readdir, - readlink, - stat, - symlink, - unlink, - utimes, -} = require('fs/promises') -const { - dirname, - isAbsolute, - join, - parse, - resolve, - sep, - toNamespacedPath, -} = require('path') -const { fileURLToPath } = require('url') - -const defaultOptions = { - dereference: false, - errorOnExist: false, - filter: undefined, - force: true, - preserveTimestamps: false, - recursive: false, -} - -async function cp (src, dest, opts) { - if (opts != null && typeof opts !== 'object') { - throw new ERR_INVALID_ARG_TYPE('options', ['Object'], opts) - } - return cpFn( - toNamespacedPath(getValidatedPath(src)), - toNamespacedPath(getValidatedPath(dest)), - { ...defaultOptions, ...opts }) -} - -function getValidatedPath (fileURLOrPath) { - const path = fileURLOrPath != null && fileURLOrPath.href - && fileURLOrPath.origin - ? 
fileURLToPath(fileURLOrPath) - : fileURLOrPath - return path -} - -async function cpFn (src, dest, opts) { - // Warn about using preserveTimestamps on 32-bit node - // istanbul ignore next - if (opts.preserveTimestamps && process.arch === 'ia32') { - const warning = 'Using the preserveTimestamps option in 32-bit ' + - 'node is not recommended' - process.emitWarning(warning, 'TimestampPrecisionWarning') - } - const stats = await checkPaths(src, dest, opts) - const { srcStat, destStat } = stats - await checkParentPaths(src, srcStat, dest) - if (opts.filter) { - return handleFilter(checkParentDir, destStat, src, dest, opts) - } - return checkParentDir(destStat, src, dest, opts) -} - -async function checkPaths (src, dest, opts) { - const { 0: srcStat, 1: destStat } = await getStats(src, dest, opts) - if (destStat) { - if (areIdentical(srcStat, destStat)) { - throw new ERR_FS_CP_EINVAL({ - message: 'src and dest cannot be the same', - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - if (srcStat.isDirectory() && !destStat.isDirectory()) { - throw new ERR_FS_CP_DIR_TO_NON_DIR({ - message: `cannot overwrite directory ${src} ` + - `with non-directory ${dest}`, - path: dest, - syscall: 'cp', - errno: EISDIR, - }) - } - if (!srcStat.isDirectory() && destStat.isDirectory()) { - throw new ERR_FS_CP_NON_DIR_TO_DIR({ - message: `cannot overwrite non-directory ${src} ` + - `with directory ${dest}`, - path: dest, - syscall: 'cp', - errno: ENOTDIR, - }) - } - } - - if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { - throw new ERR_FS_CP_EINVAL({ - message: `cannot copy ${src} to a subdirectory of self ${dest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - return { srcStat, destStat } -} - -function areIdentical (srcStat, destStat) { - return destStat.ino && destStat.dev && destStat.ino === srcStat.ino && - destStat.dev === srcStat.dev -} - -function getStats (src, dest, opts) { - const statFunc = opts.dereference ? - (file) => stat(file, { bigint: true }) : - (file) => lstat(file, { bigint: true }) - return Promise.all([ - statFunc(src), - statFunc(dest).catch((err) => { - // istanbul ignore next: unsure how to cover. - if (err.code === 'ENOENT') { - return null - } - // istanbul ignore next: unsure how to cover. - throw err - }), - ]) -} - -async function checkParentDir (destStat, src, dest, opts) { - const destParent = dirname(dest) - const dirExists = await pathExists(destParent) - if (dirExists) { - return getStatsForCopy(destStat, src, dest, opts) - } - await mkdir(destParent, { recursive: true }) - return getStatsForCopy(destStat, src, dest, opts) -} - -function pathExists (dest) { - return stat(dest).then( - () => true, - // istanbul ignore next: not sure when this would occur - (err) => (err.code === 'ENOENT' ? false : Promise.reject(err))) -} - -// Recursively check if dest parent is a subdirectory of src. -// It works for all file types including symlinks since it -// checks the src and dest inodes. It starts from the deepest -// parent and stops once it reaches the src parent or the root path. 
-async function checkParentPaths (src, srcStat, dest) { - const srcParent = resolve(dirname(src)) - const destParent = resolve(dirname(dest)) - if (destParent === srcParent || destParent === parse(destParent).root) { - return - } - let destStat - try { - destStat = await stat(destParent, { bigint: true }) - } catch (err) { - // istanbul ignore else: not sure when this would occur - if (err.code === 'ENOENT') { - return - } - // istanbul ignore next: not sure when this would occur - throw err - } - if (areIdentical(srcStat, destStat)) { - throw new ERR_FS_CP_EINVAL({ - message: `cannot copy ${src} to a subdirectory of self ${dest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - return checkParentPaths(src, srcStat, destParent) -} - -const normalizePathToArray = (path) => - resolve(path).split(sep).filter(Boolean) - -// Return true if dest is a subdir of src, otherwise false. -// It only checks the path strings. -function isSrcSubdir (src, dest) { - const srcArr = normalizePathToArray(src) - const destArr = normalizePathToArray(dest) - return srcArr.every((cur, i) => destArr[i] === cur) -} - -async function handleFilter (onInclude, destStat, src, dest, opts, cb) { - const include = await opts.filter(src, dest) - if (include) { - return onInclude(destStat, src, dest, opts, cb) - } -} - -function startCopy (destStat, src, dest, opts) { - if (opts.filter) { - return handleFilter(getStatsForCopy, destStat, src, dest, opts) - } - return getStatsForCopy(destStat, src, dest, opts) -} - -async function getStatsForCopy (destStat, src, dest, opts) { - const statFn = opts.dereference ? stat : lstat - const srcStat = await statFn(src) - // istanbul ignore else: can't portably test FIFO - if (srcStat.isDirectory() && opts.recursive) { - return onDir(srcStat, destStat, src, dest, opts) - } else if (srcStat.isDirectory()) { - throw new ERR_FS_EISDIR({ - message: `${src} is a directory (not copied)`, - path: src, - syscall: 'cp', - errno: EINVAL, - }) - } else if (srcStat.isFile() || - srcStat.isCharacterDevice() || - srcStat.isBlockDevice()) { - return onFile(srcStat, destStat, src, dest, opts) - } else if (srcStat.isSymbolicLink()) { - return onLink(destStat, src, dest) - } else if (srcStat.isSocket()) { - throw new ERR_FS_CP_SOCKET({ - message: `cannot copy a socket file: ${dest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } else if (srcStat.isFIFO()) { - throw new ERR_FS_CP_FIFO_PIPE({ - message: `cannot copy a FIFO pipe: ${dest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - // istanbul ignore next: should be unreachable - throw new ERR_FS_CP_UNKNOWN({ - message: `cannot copy an unknown file type: ${dest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) -} - -function onFile (srcStat, destStat, src, dest, opts) { - if (!destStat) { - return _copyFile(srcStat, src, dest, opts) - } - return mayCopyFile(srcStat, src, dest, opts) -} - -async function mayCopyFile (srcStat, src, dest, opts) { - if (opts.force) { - await unlink(dest) - return _copyFile(srcStat, src, dest, opts) - } else if (opts.errorOnExist) { - throw new ERR_FS_CP_EEXIST({ - message: `${dest} already exists`, - path: dest, - syscall: 'cp', - errno: EEXIST, - }) - } -} - -async function _copyFile (srcStat, src, dest, opts) { - await copyFile(src, dest) - if (opts.preserveTimestamps) { - return handleTimestampsAndMode(srcStat.mode, src, dest) - } - return setDestMode(dest, srcStat.mode) -} - -async function handleTimestampsAndMode (srcMode, src, dest) { - // Make sure the file is writable before 
setting the timestamp - // otherwise open fails with EPERM when invoked with 'r+' - // (through utimes call) - if (fileIsNotWritable(srcMode)) { - await makeFileWritable(dest, srcMode) - return setDestTimestampsAndMode(srcMode, src, dest) - } - return setDestTimestampsAndMode(srcMode, src, dest) -} - -function fileIsNotWritable (srcMode) { - return (srcMode & 0o200) === 0 -} - -function makeFileWritable (dest, srcMode) { - return setDestMode(dest, srcMode | 0o200) -} - -async function setDestTimestampsAndMode (srcMode, src, dest) { - await setDestTimestamps(src, dest) - return setDestMode(dest, srcMode) -} - -function setDestMode (dest, srcMode) { - return chmod(dest, srcMode) -} - -async function setDestTimestamps (src, dest) { - // The initial srcStat.atime cannot be trusted - // because it is modified by the read(2) system call - // (See https://nodejs.org/api/fs.html#fs_stat_time_values) - const updatedSrcStat = await stat(src) - return utimes(dest, updatedSrcStat.atime, updatedSrcStat.mtime) -} - -function onDir (srcStat, destStat, src, dest, opts) { - if (!destStat) { - return mkDirAndCopy(srcStat.mode, src, dest, opts) - } - return copyDir(src, dest, opts) -} - -async function mkDirAndCopy (srcMode, src, dest, opts) { - await mkdir(dest) - await copyDir(src, dest, opts) - return setDestMode(dest, srcMode) -} - -async function copyDir (src, dest, opts) { - const dir = await readdir(src) - for (let i = 0; i < dir.length; i++) { - const item = dir[i] - const srcItem = join(src, item) - const destItem = join(dest, item) - const { destStat } = await checkPaths(srcItem, destItem, opts) - await startCopy(destStat, srcItem, destItem, opts) - } -} - -async function onLink (destStat, src, dest) { - let resolvedSrc = await readlink(src) - if (!isAbsolute(resolvedSrc)) { - resolvedSrc = resolve(dirname(src), resolvedSrc) - } - if (!destStat) { - return symlink(resolvedSrc, dest) - } - let resolvedDest - try { - resolvedDest = await readlink(dest) - } catch (err) { - // Dest exists and is a regular file or directory, - // Windows may throw UNKNOWN error. If dest already exists, - // fs throws error anyway, so no need to guard against it here. - // istanbul ignore next: can only test on windows - if (err.code === 'EINVAL' || err.code === 'UNKNOWN') { - return symlink(resolvedSrc, dest) - } - // istanbul ignore next: should not be possible - throw err - } - if (!isAbsolute(resolvedDest)) { - resolvedDest = resolve(dirname(dest), resolvedDest) - } - if (isSrcSubdir(resolvedSrc, resolvedDest)) { - throw new ERR_FS_CP_EINVAL({ - message: `cannot copy ${resolvedSrc} to a subdirectory of self ` + - `${resolvedDest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - // Do not copy if src is a subdir of dest since unlinking - // dest in this case would result in removing src contents - // and therefore a broken symlink would be created. 
- const srcStat = await stat(src) - if (srcStat.isDirectory() && isSrcSubdir(resolvedDest, resolvedSrc)) { - throw new ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY({ - message: `cannot overwrite ${resolvedDest} with ${resolvedSrc}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - return copyLink(resolvedSrc, dest) -} - -async function copyLink (resolvedSrc, dest) { - await unlink(dest) - return symlink(resolvedSrc, dest) -} - -module.exports = cp diff --git a/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/index.js b/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/index.js deleted file mode 100644 index 81c746304cc42..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/index.js +++ /dev/null @@ -1,13 +0,0 @@ -'use strict' - -const cp = require('./cp/index.js') -const withTempDir = require('./with-temp-dir.js') -const readdirScoped = require('./readdir-scoped.js') -const moveFile = require('./move-file.js') - -module.exports = { - cp, - withTempDir, - readdirScoped, - moveFile, -} diff --git a/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/move-file.js b/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/move-file.js deleted file mode 100644 index d56e06d384659..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/move-file.js +++ /dev/null @@ -1,78 +0,0 @@ -const { dirname, join, resolve, relative, isAbsolute } = require('path') -const fs = require('fs/promises') - -const pathExists = async path => { - try { - await fs.access(path) - return true - } catch (er) { - return er.code !== 'ENOENT' - } -} - -const moveFile = async (source, destination, options = {}, root = true, symlinks = []) => { - if (!source || !destination) { - throw new TypeError('`source` and `destination` file required') - } - - options = { - overwrite: true, - ...options, - } - - if (!options.overwrite && await pathExists(destination)) { - throw new Error(`The destination file exists: ${destination}`) - } - - await fs.mkdir(dirname(destination), { recursive: true }) - - try { - await fs.rename(source, destination) - } catch (error) { - if (error.code === 'EXDEV' || error.code === 'EPERM') { - const sourceStat = await fs.lstat(source) - if (sourceStat.isDirectory()) { - const files = await fs.readdir(source) - await Promise.all(files.map((file) => - moveFile(join(source, file), join(destination, file), options, false, symlinks) - )) - } else if (sourceStat.isSymbolicLink()) { - symlinks.push({ source, destination }) - } else { - await fs.copyFile(source, destination) - } - } else { - throw error - } - } - - if (root) { - await Promise.all(symlinks.map(async ({ source: symSource, destination: symDestination }) => { - let target = await fs.readlink(symSource) - // junction symlinks in windows will be absolute paths, so we need to - // make sure they point to the symlink destination - if (isAbsolute(target)) { - target = resolve(symDestination, relative(symSource, target)) - } - // try to determine what the actual file is so we can create the correct - // type of symlink in windows - let targetStat = 'file' - try { - targetStat = await fs.stat(resolve(dirname(symSource), target)) - if (targetStat.isDirectory()) { - targetStat = 'junction' - } - } catch { - // targetStat remains 'file' - } - await fs.symlink( - target, - symDestination, - targetStat - ) - })) - await fs.rm(source, { recursive: true, force: true }) - } -} - -module.exports = moveFile diff --git a/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/readdir-scoped.js 
b/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/readdir-scoped.js deleted file mode 100644 index cd601dfbe7486..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/readdir-scoped.js +++ /dev/null @@ -1,20 +0,0 @@ -const { readdir } = require('fs/promises') -const { join } = require('path') - -const readdirScoped = async (dir) => { - const results = [] - - for (const item of await readdir(dir)) { - if (item.startsWith('@')) { - for (const scopedItem of await readdir(join(dir, item))) { - results.push(join(item, scopedItem)) - } - } else { - results.push(item) - } - } - - return results -} - -module.exports = readdirScoped diff --git a/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/with-temp-dir.js b/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/with-temp-dir.js deleted file mode 100644 index 0738ac4f29e1b..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/with-temp-dir.js +++ /dev/null @@ -1,39 +0,0 @@ -const { join, sep } = require('path') - -const getOptions = require('./common/get-options.js') -const { mkdir, mkdtemp, rm } = require('fs/promises') - -// create a temp directory, ensure its permissions match its parent, then call -// the supplied function passing it the path to the directory. clean up after -// the function finishes, whether it throws or not -const withTempDir = async (root, fn, opts) => { - const options = getOptions(opts, { - copy: ['tmpPrefix'], - }) - // create the directory - await mkdir(root, { recursive: true }) - - const target = await mkdtemp(join(`${root}${sep}`, options.tmpPrefix || '')) - let err - let result - - try { - result = await fn(target) - } catch (_err) { - err = _err - } - - try { - await rm(target, { force: true, recursive: true }) - } catch { - // ignore errors - } - - if (err) { - throw err - } - - return result -} - -module.exports = withTempDir diff --git a/node_modules/@sigstore/sign/node_modules/@npmcli/fs/package.json b/node_modules/@sigstore/sign/node_modules/@npmcli/fs/package.json deleted file mode 100644 index 5261a11b78000..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/@npmcli/fs/package.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "name": "@npmcli/fs", - "version": "3.1.1", - "description": "filesystem utilities for the npm cli", - "main": "lib/index.js", - "files": [ - "bin/", - "lib/" - ], - "scripts": { - "snap": "tap", - "test": "tap", - "npmclilint": "npmcli-lint", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "lintfix": "npm run lint -- --fix", - "posttest": "npm run lint", - "postsnap": "npm run lintfix --", - "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/npm/fs.git" - }, - "keywords": [ - "npm", - "oss" - ], - "author": "GitHub Inc.", - "license": "ISC", - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", - "tap": "^16.0.1" - }, - "dependencies": { - "semver": "^7.3.5" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.22.0" - }, - "tap": { - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - } -} diff --git a/node_modules/@sigstore/sign/node_modules/cacache/LICENSE.md b/node_modules/@sigstore/sign/node_modules/cacache/LICENSE.md deleted file mode 100644 index 8d28acf866d93..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/cacache/LICENSE.md +++ /dev/null @@ -1,16 +0,0 @@ -ISC License - -Copyright (c) npm, Inc. - -Permission to use, copy, modify, and/or distribute this software for -any purpose with or without fee is hereby granted, provided that the -above copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS -ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE -COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE -USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/@sigstore/sign/node_modules/cacache/lib/content/path.js b/node_modules/@sigstore/sign/node_modules/cacache/lib/content/path.js deleted file mode 100644 index ad5a76a4f73f2..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/cacache/lib/content/path.js +++ /dev/null @@ -1,29 +0,0 @@ -'use strict' - -const contentVer = require('../../package.json')['cache-version'].content -const hashToSegments = require('../util/hash-to-segments') -const path = require('path') -const ssri = require('ssri') - -// Current format of content file path: -// -// sha512-BaSE64Hex= -> -// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee -// -module.exports = contentPath - -function contentPath (cache, integrity) { - const sri = ssri.parse(integrity, { single: true }) - // contentPath is the *strongest* algo given - return path.join( - contentDir(cache), - sri.algorithm, - ...hashToSegments(sri.hexDigest()) - ) -} - -module.exports.contentDir = contentDir - -function contentDir (cache) { - return path.join(cache, `content-v${contentVer}`) -} diff --git a/node_modules/@sigstore/sign/node_modules/cacache/lib/content/read.js b/node_modules/@sigstore/sign/node_modules/cacache/lib/content/read.js deleted file mode 100644 index 5f6192c3cec56..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/cacache/lib/content/read.js +++ /dev/null @@ -1,165 +0,0 @@ -'use strict' - -const fs = require('fs/promises') -const fsm = require('fs-minipass') -const ssri = require('ssri') -const contentPath = require('./path') -const Pipeline = require('minipass-pipeline') - -module.exports = read - -const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024 -async function read (cache, integrity, opts = {}) { - const { size } = opts - const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { - // get size - const stat = size ? 
{ size } : await fs.stat(cpath) - return { stat, cpath, sri } - }) - - if (stat.size > MAX_SINGLE_READ_SIZE) { - return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() - } - - const data = await fs.readFile(cpath, { encoding: null }) - - if (stat.size !== data.length) { - throw sizeError(stat.size, data.length) - } - - if (!ssri.checkData(data, sri)) { - throw integrityError(sri, cpath) - } - - return data -} - -const readPipeline = (cpath, size, sri, stream) => { - stream.push( - new fsm.ReadStream(cpath, { - size, - readSize: MAX_SINGLE_READ_SIZE, - }), - ssri.integrityStream({ - integrity: sri, - size, - }) - ) - return stream -} - -module.exports.stream = readStream -module.exports.readStream = readStream - -function readStream (cache, integrity, opts = {}) { - const { size } = opts - const stream = new Pipeline() - // Set all this up to run on the stream and then just return the stream - Promise.resolve().then(async () => { - const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { - // get size - const stat = size ? { size } : await fs.stat(cpath) - return { stat, cpath, sri } - }) - - return readPipeline(cpath, stat.size, sri, stream) - }).catch(err => stream.emit('error', err)) - - return stream -} - -module.exports.copy = copy - -function copy (cache, integrity, dest) { - return withContentSri(cache, integrity, (cpath) => { - return fs.copyFile(cpath, dest) - }) -} - -module.exports.hasContent = hasContent - -async function hasContent (cache, integrity) { - if (!integrity) { - return false - } - - try { - return await withContentSri(cache, integrity, async (cpath, sri) => { - const stat = await fs.stat(cpath) - return { size: stat.size, sri, stat } - }) - } catch (err) { - if (err.code === 'ENOENT') { - return false - } - - if (err.code === 'EPERM') { - /* istanbul ignore else */ - if (process.platform !== 'win32') { - throw err - } else { - return false - } - } - } -} - -async function withContentSri (cache, integrity, fn) { - const sri = ssri.parse(integrity) - // If `integrity` has multiple entries, pick the first digest - // with available local data. 
- const algo = sri.pickAlgorithm() - const digests = sri[algo] - - if (digests.length <= 1) { - const cpath = contentPath(cache, digests[0]) - return fn(cpath, digests[0]) - } else { - // Can't use race here because a generic error can happen before - // a ENOENT error, and can happen before a valid result - const results = await Promise.all(digests.map(async (meta) => { - try { - return await withContentSri(cache, meta, fn) - } catch (err) { - if (err.code === 'ENOENT') { - return Object.assign( - new Error('No matching content found for ' + sri.toString()), - { code: 'ENOENT' } - ) - } - return err - } - })) - // Return the first non error if it is found - const result = results.find((r) => !(r instanceof Error)) - if (result) { - return result - } - - // Throw the No matching content found error - const enoentError = results.find((r) => r.code === 'ENOENT') - if (enoentError) { - throw enoentError - } - - // Throw generic error - throw results.find((r) => r instanceof Error) - } -} - -function sizeError (expected, found) { - /* eslint-disable-next-line max-len */ - const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) - err.expected = expected - err.found = found - err.code = 'EBADSIZE' - return err -} - -function integrityError (sri, path) { - const err = new Error(`Integrity verification failed for ${sri} (${path})`) - err.code = 'EINTEGRITY' - err.sri = sri - err.path = path - return err -} diff --git a/node_modules/@sigstore/sign/node_modules/cacache/lib/content/rm.js b/node_modules/@sigstore/sign/node_modules/cacache/lib/content/rm.js deleted file mode 100644 index ce58d679e4cb2..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/cacache/lib/content/rm.js +++ /dev/null @@ -1,18 +0,0 @@ -'use strict' - -const fs = require('fs/promises') -const contentPath = require('./path') -const { hasContent } = require('./read') - -module.exports = rm - -async function rm (cache, integrity) { - const content = await hasContent(cache, integrity) - // ~pretty~ sure we can't end up with a content lacking sri, but be safe - if (content && content.sri) { - await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true }) - return true - } else { - return false - } -} diff --git a/node_modules/@sigstore/sign/node_modules/cacache/lib/content/write.js b/node_modules/@sigstore/sign/node_modules/cacache/lib/content/write.js deleted file mode 100644 index e7187abca8788..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/cacache/lib/content/write.js +++ /dev/null @@ -1,206 +0,0 @@ -'use strict' - -const events = require('events') - -const contentPath = require('./path') -const fs = require('fs/promises') -const { moveFile } = require('@npmcli/fs') -const { Minipass } = require('minipass') -const Pipeline = require('minipass-pipeline') -const Flush = require('minipass-flush') -const path = require('path') -const ssri = require('ssri') -const uniqueFilename = require('unique-filename') -const fsm = require('fs-minipass') - -module.exports = write - -// Cache of move operations in process so we don't duplicate -const moveOperations = new Map() - -async function write (cache, data, opts = {}) { - const { algorithms, size, integrity } = opts - - if (typeof size === 'number' && data.length !== size) { - throw sizeError(size, data.length) - } - - const sri = ssri.fromData(data, algorithms ? 
{ algorithms } : {}) - if (integrity && !ssri.checkData(data, integrity, opts)) { - throw checksumError(integrity, sri) - } - - for (const algo in sri) { - const tmp = await makeTmp(cache, opts) - const hash = sri[algo].toString() - try { - await fs.writeFile(tmp.target, data, { flag: 'wx' }) - await moveToDestination(tmp, cache, hash, opts) - } finally { - if (!tmp.moved) { - await fs.rm(tmp.target, { recursive: true, force: true }) - } - } - } - return { integrity: sri, size: data.length } -} - -module.exports.stream = writeStream - -// writes proxied to the 'inputStream' that is passed to the Promise -// 'end' is deferred until content is handled. -class CacacheWriteStream extends Flush { - constructor (cache, opts) { - super() - this.opts = opts - this.cache = cache - this.inputStream = new Minipass() - this.inputStream.on('error', er => this.emit('error', er)) - this.inputStream.on('drain', () => this.emit('drain')) - this.handleContentP = null - } - - write (chunk, encoding, cb) { - if (!this.handleContentP) { - this.handleContentP = handleContent( - this.inputStream, - this.cache, - this.opts - ) - this.handleContentP.catch(error => this.emit('error', error)) - } - return this.inputStream.write(chunk, encoding, cb) - } - - flush (cb) { - this.inputStream.end(() => { - if (!this.handleContentP) { - const e = new Error('Cache input stream was empty') - e.code = 'ENODATA' - // empty streams are probably emitting end right away. - // defer this one tick by rejecting a promise on it. - return Promise.reject(e).catch(cb) - } - // eslint-disable-next-line promise/catch-or-return - this.handleContentP.then( - (res) => { - res.integrity && this.emit('integrity', res.integrity) - // eslint-disable-next-line promise/always-return - res.size !== null && this.emit('size', res.size) - cb() - }, - (er) => cb(er) - ) - }) - } -} - -function writeStream (cache, opts = {}) { - return new CacacheWriteStream(cache, opts) -} - -async function handleContent (inputStream, cache, opts) { - const tmp = await makeTmp(cache, opts) - try { - const res = await pipeToTmp(inputStream, cache, tmp.target, opts) - await moveToDestination( - tmp, - cache, - res.integrity, - opts - ) - return res - } finally { - if (!tmp.moved) { - await fs.rm(tmp.target, { recursive: true, force: true }) - } - } -} - -async function pipeToTmp (inputStream, cache, tmpTarget, opts) { - const outStream = new fsm.WriteStream(tmpTarget, { - flags: 'wx', - }) - - if (opts.integrityEmitter) { - // we need to create these all simultaneously since they can fire in any order - const [integrity, size] = await Promise.all([ - events.once(opts.integrityEmitter, 'integrity').then(res => res[0]), - events.once(opts.integrityEmitter, 'size').then(res => res[0]), - new Pipeline(inputStream, outStream).promise(), - ]) - return { integrity, size } - } - - let integrity - let size - const hashStream = ssri.integrityStream({ - integrity: opts.integrity, - algorithms: opts.algorithms, - size: opts.size, - }) - hashStream.on('integrity', i => { - integrity = i - }) - hashStream.on('size', s => { - size = s - }) - - const pipeline = new Pipeline(inputStream, hashStream, outStream) - await pipeline.promise() - return { integrity, size } -} - -async function makeTmp (cache, opts) { - const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) - await fs.mkdir(path.dirname(tmpTarget), { recursive: true }) - return { - target: tmpTarget, - moved: false, - } -} - -async function moveToDestination (tmp, cache, sri) { - const destination = 
contentPath(cache, sri) - const destDir = path.dirname(destination) - if (moveOperations.has(destination)) { - return moveOperations.get(destination) - } - moveOperations.set( - destination, - fs.mkdir(destDir, { recursive: true }) - .then(async () => { - await moveFile(tmp.target, destination, { overwrite: false }) - tmp.moved = true - return tmp.moved - }) - .catch(err => { - if (!err.message.startsWith('The destination file exists')) { - throw Object.assign(err, { code: 'EEXIST' }) - } - }).finally(() => { - moveOperations.delete(destination) - }) - - ) - return moveOperations.get(destination) -} - -function sizeError (expected, found) { - /* eslint-disable-next-line max-len */ - const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) - err.expected = expected - err.found = found - err.code = 'EBADSIZE' - return err -} - -function checksumError (expected, found) { - const err = new Error(`Integrity check failed: - Wanted: ${expected} - Found: ${found}`) - err.code = 'EINTEGRITY' - err.expected = expected - err.found = found - return err -} diff --git a/node_modules/@sigstore/sign/node_modules/cacache/lib/entry-index.js b/node_modules/@sigstore/sign/node_modules/cacache/lib/entry-index.js deleted file mode 100644 index 89c28f2f257d4..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/cacache/lib/entry-index.js +++ /dev/null @@ -1,336 +0,0 @@ -'use strict' - -const crypto = require('crypto') -const { - appendFile, - mkdir, - readFile, - readdir, - rm, - writeFile, -} = require('fs/promises') -const { Minipass } = require('minipass') -const path = require('path') -const ssri = require('ssri') -const uniqueFilename = require('unique-filename') - -const contentPath = require('./content/path') -const hashToSegments = require('./util/hash-to-segments') -const indexV = require('../package.json')['cache-version'].index -const { moveFile } = require('@npmcli/fs') - -const pMap = require('p-map') -const lsStreamConcurrency = 5 - -module.exports.NotFoundError = class NotFoundError extends Error { - constructor (cache, key) { - super(`No cache entry for ${key} found in ${cache}`) - this.code = 'ENOENT' - this.cache = cache - this.key = key - } -} - -module.exports.compact = compact - -async function compact (cache, key, matchFn, opts = {}) { - const bucket = bucketPath(cache, key) - const entries = await bucketEntries(bucket) - const newEntries = [] - // we loop backwards because the bottom-most result is the newest - // since we add new entries with appendFile - for (let i = entries.length - 1; i >= 0; --i) { - const entry = entries[i] - // a null integrity could mean either a delete was appended - // or the user has simply stored an index that does not map - // to any content. we determine if the user wants to keep the - // null integrity based on the validateEntry function passed in options. - // if the integrity is null and no validateEntry is provided, we break - // as we consider the null integrity to be a deletion of everything - // that came before it. 
- if (entry.integrity === null && !opts.validateEntry) { - break - } - - // if this entry is valid, and it is either the first entry or - // the newEntries array doesn't already include an entry that - // matches this one based on the provided matchFn, then we add - // it to the beginning of our list - if ((!opts.validateEntry || opts.validateEntry(entry) === true) && - (newEntries.length === 0 || - !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) { - newEntries.unshift(entry) - } - } - - const newIndex = '\n' + newEntries.map((entry) => { - const stringified = JSON.stringify(entry) - const hash = hashEntry(stringified) - return `${hash}\t${stringified}` - }).join('\n') - - const setup = async () => { - const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) - await mkdir(path.dirname(target), { recursive: true }) - return { - target, - moved: false, - } - } - - const teardown = async (tmp) => { - if (!tmp.moved) { - return rm(tmp.target, { recursive: true, force: true }) - } - } - - const write = async (tmp) => { - await writeFile(tmp.target, newIndex, { flag: 'wx' }) - await mkdir(path.dirname(bucket), { recursive: true }) - // we use @npmcli/move-file directly here because we - // want to overwrite the existing file - await moveFile(tmp.target, bucket) - tmp.moved = true - } - - // write the file atomically - const tmp = await setup() - try { - await write(tmp) - } finally { - await teardown(tmp) - } - - // we reverse the list we generated such that the newest - // entries come first in order to make looping through them easier - // the true passed to formatEntry tells it to keep null - // integrity values, if they made it this far it's because - // validateEntry returned true, and as such we should return it - return newEntries.reverse().map((entry) => formatEntry(cache, entry, true)) -} - -module.exports.insert = insert - -async function insert (cache, key, integrity, opts = {}) { - const { metadata, size, time } = opts - const bucket = bucketPath(cache, key) - const entry = { - key, - integrity: integrity && ssri.stringify(integrity), - time: time || Date.now(), - size, - metadata, - } - try { - await mkdir(path.dirname(bucket), { recursive: true }) - const stringified = JSON.stringify(entry) - // NOTE - Cleverness ahoy! - // - // This works because it's tremendously unlikely for an entry to corrupt - // another while still preserving the string length of the JSON in - // question. So, we just slap the length in there and verify it on read. - // - // Thanks to @isaacs for the whiteboarding session that ended up with - // this. 
- await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) - } catch (err) { - if (err.code === 'ENOENT') { - return undefined - } - - throw err - } - return formatEntry(cache, entry) -} - -module.exports.find = find - -async function find (cache, key) { - const bucket = bucketPath(cache, key) - try { - const entries = await bucketEntries(bucket) - return entries.reduce((latest, next) => { - if (next && next.key === key) { - return formatEntry(cache, next) - } else { - return latest - } - }, null) - } catch (err) { - if (err.code === 'ENOENT') { - return null - } else { - throw err - } - } -} - -module.exports.delete = del - -function del (cache, key, opts = {}) { - if (!opts.removeFully) { - return insert(cache, key, null, opts) - } - - const bucket = bucketPath(cache, key) - return rm(bucket, { recursive: true, force: true }) -} - -module.exports.lsStream = lsStream - -function lsStream (cache) { - const indexDir = bucketDir(cache) - const stream = new Minipass({ objectMode: true }) - - // Set all this up to run on the stream and then just return the stream - Promise.resolve().then(async () => { - const buckets = await readdirOrEmpty(indexDir) - await pMap(buckets, async (bucket) => { - const bucketPath = path.join(indexDir, bucket) - const subbuckets = await readdirOrEmpty(bucketPath) - await pMap(subbuckets, async (subbucket) => { - const subbucketPath = path.join(bucketPath, subbucket) - - // "/cachename//./*" - const subbucketEntries = await readdirOrEmpty(subbucketPath) - await pMap(subbucketEntries, async (entry) => { - const entryPath = path.join(subbucketPath, entry) - try { - const entries = await bucketEntries(entryPath) - // using a Map here prevents duplicate keys from showing up - // twice, I guess? - const reduced = entries.reduce((acc, entry) => { - acc.set(entry.key, entry) - return acc - }, new Map()) - // reduced is a map of key => entry - for (const entry of reduced.values()) { - const formatted = formatEntry(cache, entry) - if (formatted) { - stream.write(formatted) - } - } - } catch (err) { - if (err.code === 'ENOENT') { - return undefined - } - throw err - } - }, - { concurrency: lsStreamConcurrency }) - }, - { concurrency: lsStreamConcurrency }) - }, - { concurrency: lsStreamConcurrency }) - stream.end() - return stream - }).catch(err => stream.emit('error', err)) - - return stream -} - -module.exports.ls = ls - -async function ls (cache) { - const entries = await lsStream(cache).collect() - return entries.reduce((acc, xs) => { - acc[xs.key] = xs - return acc - }, {}) -} - -module.exports.bucketEntries = bucketEntries - -async function bucketEntries (bucket, filter) { - const data = await readFile(bucket, 'utf8') - return _bucketEntries(data, filter) -} - -function _bucketEntries (data) { - const entries = [] - data.split('\n').forEach((entry) => { - if (!entry) { - return - } - - const pieces = entry.split('\t') - if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { - // Hash is no good! Corruption or malice? Doesn't matter! 
- // EJECT EJECT - return - } - let obj - try { - obj = JSON.parse(pieces[1]) - } catch (_) { - // eslint-ignore-next-line no-empty-block - } - // coverage disabled here, no need to test with an entry that parses to something falsey - // istanbul ignore else - if (obj) { - entries.push(obj) - } - }) - return entries -} - -module.exports.bucketDir = bucketDir - -function bucketDir (cache) { - return path.join(cache, `index-v${indexV}`) -} - -module.exports.bucketPath = bucketPath - -function bucketPath (cache, key) { - const hashed = hashKey(key) - return path.join.apply( - path, - [bucketDir(cache)].concat(hashToSegments(hashed)) - ) -} - -module.exports.hashKey = hashKey - -function hashKey (key) { - return hash(key, 'sha256') -} - -module.exports.hashEntry = hashEntry - -function hashEntry (str) { - return hash(str, 'sha1') -} - -function hash (str, digest) { - return crypto - .createHash(digest) - .update(str) - .digest('hex') -} - -function formatEntry (cache, entry, keepAll) { - // Treat null digests as deletions. They'll shadow any previous entries. - if (!entry.integrity && !keepAll) { - return null - } - - return { - key: entry.key, - integrity: entry.integrity, - path: entry.integrity ? contentPath(cache, entry.integrity) : undefined, - size: entry.size, - time: entry.time, - metadata: entry.metadata, - } -} - -function readdirOrEmpty (dir) { - return readdir(dir).catch((err) => { - if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { - return [] - } - - throw err - }) -} diff --git a/node_modules/@sigstore/sign/node_modules/cacache/lib/get.js b/node_modules/@sigstore/sign/node_modules/cacache/lib/get.js deleted file mode 100644 index 80ec206c7ecaa..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/cacache/lib/get.js +++ /dev/null @@ -1,170 +0,0 @@ -'use strict' - -const Collect = require('minipass-collect') -const { Minipass } = require('minipass') -const Pipeline = require('minipass-pipeline') - -const index = require('./entry-index') -const memo = require('./memoization') -const read = require('./content/read') - -async function getData (cache, key, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return { - metadata: memoized.entry.metadata, - data: memoized.data, - integrity: memoized.entry.integrity, - size: memoized.entry.size, - } - } - - const entry = await index.find(cache, key, opts) - if (!entry) { - throw new index.NotFoundError(cache, key) - } - const data = await read(cache, entry.integrity, { integrity, size }) - if (memoize) { - memo.put(cache, entry, data, opts) - } - - return { - data, - metadata: entry.metadata, - size: entry.size, - integrity: entry.integrity, - } -} -module.exports = getData - -async function getDataByDigest (cache, key, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get.byDigest(cache, key, opts) - if (memoized && memoize !== false) { - return memoized - } - - const res = await read(cache, key, { integrity, size }) - if (memoize) { - memo.put.byDigest(cache, key, res, opts) - } - return res -} -module.exports.byDigest = getDataByDigest - -const getMemoizedStream = (memoized) => { - const stream = new Minipass() - stream.on('newListener', function (ev, cb) { - ev === 'metadata' && cb(memoized.entry.metadata) - ev === 'integrity' && cb(memoized.entry.integrity) - ev === 'size' && cb(memoized.entry.size) - }) - stream.end(memoized.data) - return stream -} - -function getStream (cache, key, opts = {}) { - 
const { memoize, size } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return getMemoizedStream(memoized) - } - - const stream = new Pipeline() - // Set all this up to run on the stream and then just return the stream - Promise.resolve().then(async () => { - const entry = await index.find(cache, key) - if (!entry) { - throw new index.NotFoundError(cache, key) - } - - stream.emit('metadata', entry.metadata) - stream.emit('integrity', entry.integrity) - stream.emit('size', entry.size) - stream.on('newListener', function (ev, cb) { - ev === 'metadata' && cb(entry.metadata) - ev === 'integrity' && cb(entry.integrity) - ev === 'size' && cb(entry.size) - }) - - const src = read.readStream( - cache, - entry.integrity, - { ...opts, size: typeof size !== 'number' ? entry.size : size } - ) - - if (memoize) { - const memoStream = new Collect.PassThrough() - memoStream.on('collect', data => memo.put(cache, entry, data, opts)) - stream.unshift(memoStream) - } - stream.unshift(src) - return stream - }).catch((err) => stream.emit('error', err)) - - return stream -} - -module.exports.stream = getStream - -function getStreamDigest (cache, integrity, opts = {}) { - const { memoize } = opts - const memoized = memo.get.byDigest(cache, integrity, opts) - if (memoized && memoize !== false) { - const stream = new Minipass() - stream.end(memoized) - return stream - } else { - const stream = read.readStream(cache, integrity, opts) - if (!memoize) { - return stream - } - - const memoStream = new Collect.PassThrough() - memoStream.on('collect', data => memo.put.byDigest( - cache, - integrity, - data, - opts - )) - return new Pipeline(stream, memoStream) - } -} - -module.exports.stream.byDigest = getStreamDigest - -function info (cache, key, opts = {}) { - const { memoize } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return Promise.resolve(memoized.entry) - } else { - return index.find(cache, key) - } -} -module.exports.info = info - -async function copy (cache, key, dest, opts = {}) { - const entry = await index.find(cache, key, opts) - if (!entry) { - throw new index.NotFoundError(cache, key) - } - await read.copy(cache, entry.integrity, dest, opts) - return { - metadata: entry.metadata, - size: entry.size, - integrity: entry.integrity, - } -} - -module.exports.copy = copy - -async function copyByDigest (cache, key, dest, opts = {}) { - await read.copy(cache, key, dest, opts) - return key -} - -module.exports.copy.byDigest = copyByDigest - -module.exports.hasContent = read.hasContent diff --git a/node_modules/@sigstore/sign/node_modules/cacache/lib/index.js b/node_modules/@sigstore/sign/node_modules/cacache/lib/index.js deleted file mode 100644 index c9b0da5f3a271..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/cacache/lib/index.js +++ /dev/null @@ -1,42 +0,0 @@ -'use strict' - -const get = require('./get.js') -const put = require('./put.js') -const rm = require('./rm.js') -const verify = require('./verify.js') -const { clearMemoized } = require('./memoization.js') -const tmp = require('./util/tmp.js') -const index = require('./entry-index.js') - -module.exports.index = {} -module.exports.index.compact = index.compact -module.exports.index.insert = index.insert - -module.exports.ls = index.ls -module.exports.ls.stream = index.lsStream - -module.exports.get = get -module.exports.get.byDigest = get.byDigest -module.exports.get.stream = get.stream -module.exports.get.stream.byDigest = get.stream.byDigest 
-module.exports.get.copy = get.copy -module.exports.get.copy.byDigest = get.copy.byDigest -module.exports.get.info = get.info -module.exports.get.hasContent = get.hasContent - -module.exports.put = put -module.exports.put.stream = put.stream - -module.exports.rm = rm.entry -module.exports.rm.all = rm.all -module.exports.rm.entry = module.exports.rm -module.exports.rm.content = rm.content - -module.exports.clearMemoized = clearMemoized - -module.exports.tmp = {} -module.exports.tmp.mkdir = tmp.mkdir -module.exports.tmp.withTmp = tmp.withTmp - -module.exports.verify = verify -module.exports.verify.lastRun = verify.lastRun diff --git a/node_modules/@sigstore/sign/node_modules/cacache/lib/memoization.js b/node_modules/@sigstore/sign/node_modules/cacache/lib/memoization.js deleted file mode 100644 index 2ecc60912e456..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/cacache/lib/memoization.js +++ /dev/null @@ -1,72 +0,0 @@ -'use strict' - -const { LRUCache } = require('lru-cache') - -const MEMOIZED = new LRUCache({ - max: 500, - maxSize: 50 * 1024 * 1024, // 50MB - ttl: 3 * 60 * 1000, // 3 minutes - sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length, -}) - -module.exports.clearMemoized = clearMemoized - -function clearMemoized () { - const old = {} - MEMOIZED.forEach((v, k) => { - old[k] = v - }) - MEMOIZED.clear() - return old -} - -module.exports.put = put - -function put (cache, entry, data, opts) { - pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data }) - putDigest(cache, entry.integrity, data, opts) -} - -module.exports.put.byDigest = putDigest - -function putDigest (cache, integrity, data, opts) { - pickMem(opts).set(`digest:${cache}:${integrity}`, data) -} - -module.exports.get = get - -function get (cache, key, opts) { - return pickMem(opts).get(`key:${cache}:${key}`) -} - -module.exports.get.byDigest = getDigest - -function getDigest (cache, integrity, opts) { - return pickMem(opts).get(`digest:${cache}:${integrity}`) -} - -class ObjProxy { - constructor (obj) { - this.obj = obj - } - - get (key) { - return this.obj[key] - } - - set (key, val) { - this.obj[key] = val - } -} - -function pickMem (opts) { - if (!opts || !opts.memoize) { - return MEMOIZED - } else if (opts.memoize.get && opts.memoize.set) { - return opts.memoize - } else if (typeof opts.memoize === 'object') { - return new ObjProxy(opts.memoize) - } else { - return MEMOIZED - } -} diff --git a/node_modules/@sigstore/sign/node_modules/cacache/lib/put.js b/node_modules/@sigstore/sign/node_modules/cacache/lib/put.js deleted file mode 100644 index 9fc932d5f6dec..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/cacache/lib/put.js +++ /dev/null @@ -1,80 +0,0 @@ -'use strict' - -const index = require('./entry-index') -const memo = require('./memoization') -const write = require('./content/write') -const Flush = require('minipass-flush') -const { PassThrough } = require('minipass-collect') -const Pipeline = require('minipass-pipeline') - -const putOpts = (opts) => ({ - algorithms: ['sha512'], - ...opts, -}) - -module.exports = putData - -async function putData (cache, key, data, opts = {}) { - const { memoize } = opts - opts = putOpts(opts) - const res = await write(cache, data, opts) - const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size }) - if (memoize) { - memo.put(cache, entry, data, opts) - } - - return res.integrity -} - -module.exports.stream = putStream - -function putStream (cache, key, opts = {}) { - const { 
memoize } = opts - opts = putOpts(opts) - let integrity - let size - let error - - let memoData - const pipeline = new Pipeline() - // first item in the pipeline is the memoizer, because we need - // that to end first and get the collected data. - if (memoize) { - const memoizer = new PassThrough().on('collect', data => { - memoData = data - }) - pipeline.push(memoizer) - } - - // contentStream is a write-only, not a passthrough - // no data comes out of it. - const contentStream = write.stream(cache, opts) - .on('integrity', (int) => { - integrity = int - }) - .on('size', (s) => { - size = s - }) - .on('error', (err) => { - error = err - }) - - pipeline.push(contentStream) - - // last but not least, we write the index and emit hash and size, - // and memoize if we're doing that - pipeline.push(new Flush({ - async flush () { - if (!error) { - const entry = await index.insert(cache, key, integrity, { ...opts, size }) - if (memoize && memoData) { - memo.put(cache, entry, memoData, opts) - } - pipeline.emit('integrity', integrity) - pipeline.emit('size', size) - } - }, - })) - - return pipeline -} diff --git a/node_modules/@sigstore/sign/node_modules/cacache/lib/rm.js b/node_modules/@sigstore/sign/node_modules/cacache/lib/rm.js deleted file mode 100644 index a94760c7cf243..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/cacache/lib/rm.js +++ /dev/null @@ -1,31 +0,0 @@ -'use strict' - -const { rm } = require('fs/promises') -const glob = require('./util/glob.js') -const index = require('./entry-index') -const memo = require('./memoization') -const path = require('path') -const rmContent = require('./content/rm') - -module.exports = entry -module.exports.entry = entry - -function entry (cache, key, opts) { - memo.clearMemoized() - return index.delete(cache, key, opts) -} - -module.exports.content = content - -function content (cache, integrity) { - memo.clearMemoized() - return rmContent(cache, integrity) -} - -module.exports.all = all - -async function all (cache) { - memo.clearMemoized() - const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true }) - return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true }))) -} diff --git a/node_modules/@sigstore/sign/node_modules/cacache/lib/util/glob.js b/node_modules/@sigstore/sign/node_modules/cacache/lib/util/glob.js deleted file mode 100644 index 8500c1c16a429..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/cacache/lib/util/glob.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict' - -const { glob } = require('glob') -const path = require('path') - -const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep) -module.exports = (path, options) => glob(globify(path), options) diff --git a/node_modules/@sigstore/sign/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/@sigstore/sign/node_modules/cacache/lib/util/hash-to-segments.js deleted file mode 100644 index 445599b503808..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/cacache/lib/util/hash-to-segments.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict' - -module.exports = hashToSegments - -function hashToSegments (hash) { - return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)] -} diff --git a/node_modules/@sigstore/sign/node_modules/cacache/lib/util/tmp.js b/node_modules/@sigstore/sign/node_modules/cacache/lib/util/tmp.js deleted file mode 100644 index 0bf5302136ebe..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/cacache/lib/util/tmp.js +++ /dev/null @@ -1,26 +0,0 @@ 
-'use strict' - -const { withTempDir } = require('@npmcli/fs') -const fs = require('fs/promises') -const path = require('path') - -module.exports.mkdir = mktmpdir - -async function mktmpdir (cache, opts = {}) { - const { tmpPrefix } = opts - const tmpDir = path.join(cache, 'tmp') - await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) - // do not use path.join(), it drops the trailing / if tmpPrefix is unset - const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` - return fs.mkdtemp(target, { owner: 'inherit' }) -} - -module.exports.withTmp = withTmp - -function withTmp (cache, opts, cb) { - if (!cb) { - cb = opts - opts = {} - } - return withTempDir(path.join(cache, 'tmp'), cb, opts) -} diff --git a/node_modules/@sigstore/sign/node_modules/cacache/lib/verify.js b/node_modules/@sigstore/sign/node_modules/cacache/lib/verify.js deleted file mode 100644 index d7423da1295b6..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/cacache/lib/verify.js +++ /dev/null @@ -1,257 +0,0 @@ -'use strict' - -const { - mkdir, - readFile, - rm, - stat, - truncate, - writeFile, -} = require('fs/promises') -const pMap = require('p-map') -const contentPath = require('./content/path') -const fsm = require('fs-minipass') -const glob = require('./util/glob.js') -const index = require('./entry-index') -const path = require('path') -const ssri = require('ssri') - -const hasOwnProperty = (obj, key) => - Object.prototype.hasOwnProperty.call(obj, key) - -const verifyOpts = (opts) => ({ - concurrency: 20, - log: { silly () {} }, - ...opts, -}) - -module.exports = verify - -async function verify (cache, opts) { - opts = verifyOpts(opts) - opts.log.silly('verify', 'verifying cache at', cache) - - const steps = [ - markStartTime, - fixPerms, - garbageCollect, - rebuildIndex, - cleanTmp, - writeVerifile, - markEndTime, - ] - - const stats = {} - for (const step of steps) { - const label = step.name - const start = new Date() - const s = await step(cache, opts) - if (s) { - Object.keys(s).forEach((k) => { - stats[k] = s[k] - }) - } - const end = new Date() - if (!stats.runTime) { - stats.runTime = {} - } - stats.runTime[label] = end - start - } - stats.runTime.total = stats.endTime - stats.startTime - opts.log.silly( - 'verify', - 'verification finished for', - cache, - 'in', - `${stats.runTime.total}ms` - ) - return stats -} - -async function markStartTime () { - return { startTime: new Date() } -} - -async function markEndTime () { - return { endTime: new Date() } -} - -async function fixPerms (cache, opts) { - opts.log.silly('verify', 'fixing cache permissions') - await mkdir(cache, { recursive: true }) - return null -} - -// Implements a naive mark-and-sweep tracing garbage collector. -// -// The algorithm is basically as follows: -// 1. Read (and filter) all index entries ("pointers") -// 2. Mark each integrity value as "live" -// 3. Read entire filesystem tree in `content-vX/` dir -// 4. If content is live, verify its checksum and delete it if it fails -// 5. If content is not marked as live, rm it. 
-// -async function garbageCollect (cache, opts) { - opts.log.silly('verify', 'garbage collecting content') - const indexStream = index.lsStream(cache) - const liveContent = new Set() - indexStream.on('data', (entry) => { - if (opts.filter && !opts.filter(entry)) { - return - } - - // integrity is stringified, re-parse it so we can get each hash - const integrity = ssri.parse(entry.integrity) - for (const algo in integrity) { - liveContent.add(integrity[algo].toString()) - } - }) - await new Promise((resolve, reject) => { - indexStream.on('end', resolve).on('error', reject) - }) - const contentDir = contentPath.contentDir(cache) - const files = await glob(path.join(contentDir, '**'), { - follow: false, - nodir: true, - nosort: true, - }) - const stats = { - verifiedContent: 0, - reclaimedCount: 0, - reclaimedSize: 0, - badContentCount: 0, - keptSize: 0, - } - await pMap( - files, - async (f) => { - const split = f.split(/[/\\]/) - const digest = split.slice(split.length - 3).join('') - const algo = split[split.length - 4] - const integrity = ssri.fromHex(digest, algo) - if (liveContent.has(integrity.toString())) { - const info = await verifyContent(f, integrity) - if (!info.valid) { - stats.reclaimedCount++ - stats.badContentCount++ - stats.reclaimedSize += info.size - } else { - stats.verifiedContent++ - stats.keptSize += info.size - } - } else { - // No entries refer to this content. We can delete. - stats.reclaimedCount++ - const s = await stat(f) - await rm(f, { recursive: true, force: true }) - stats.reclaimedSize += s.size - } - return stats - }, - { concurrency: opts.concurrency } - ) - return stats -} - -async function verifyContent (filepath, sri) { - const contentInfo = {} - try { - const { size } = await stat(filepath) - contentInfo.size = size - contentInfo.valid = true - await ssri.checkStream(new fsm.ReadStream(filepath), sri) - } catch (err) { - if (err.code === 'ENOENT') { - return { size: 0, valid: false } - } - if (err.code !== 'EINTEGRITY') { - throw err - } - - await rm(filepath, { recursive: true, force: true }) - contentInfo.valid = false - } - return contentInfo -} - -async function rebuildIndex (cache, opts) { - opts.log.silly('verify', 'rebuilding index') - const entries = await index.ls(cache) - const stats = { - missingContent: 0, - rejectedEntries: 0, - totalEntries: 0, - } - const buckets = {} - for (const k in entries) { - /* istanbul ignore else */ - if (hasOwnProperty(entries, k)) { - const hashed = index.hashKey(k) - const entry = entries[k] - const excluded = opts.filter && !opts.filter(entry) - excluded && stats.rejectedEntries++ - if (buckets[hashed] && !excluded) { - buckets[hashed].push(entry) - } else if (buckets[hashed] && excluded) { - // skip - } else if (excluded) { - buckets[hashed] = [] - buckets[hashed]._path = index.bucketPath(cache, k) - } else { - buckets[hashed] = [entry] - buckets[hashed]._path = index.bucketPath(cache, k) - } - } - } - await pMap( - Object.keys(buckets), - (key) => { - return rebuildBucket(cache, buckets[key], stats, opts) - }, - { concurrency: opts.concurrency } - ) - return stats -} - -async function rebuildBucket (cache, bucket, stats) { - await truncate(bucket._path) - // This needs to be serialized because cacache explicitly - // lets very racy bucket conflicts clobber each other. 
- for (const entry of bucket) { - const content = contentPath(cache, entry.integrity) - try { - await stat(content) - await index.insert(cache, entry.key, entry.integrity, { - metadata: entry.metadata, - size: entry.size, - time: entry.time, - }) - stats.totalEntries++ - } catch (err) { - if (err.code === 'ENOENT') { - stats.rejectedEntries++ - stats.missingContent++ - } else { - throw err - } - } - } -} - -function cleanTmp (cache, opts) { - opts.log.silly('verify', 'cleaning tmp directory') - return rm(path.join(cache, 'tmp'), { recursive: true, force: true }) -} - -async function writeVerifile (cache, opts) { - const verifile = path.join(cache, '_lastverified') - opts.log.silly('verify', 'writing verifile to ' + verifile) - return writeFile(verifile, `${Date.now()}`) -} - -module.exports.lastRun = lastRun - -async function lastRun (cache) { - const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' }) - return new Date(+data) -} diff --git a/node_modules/@sigstore/sign/node_modules/cacache/package.json b/node_modules/@sigstore/sign/node_modules/cacache/package.json deleted file mode 100644 index 6e6219158ed75..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/cacache/package.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "name": "cacache", - "version": "18.0.4", - "cache-version": { - "content": "2", - "index": "5" - }, - "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.", - "main": "lib/index.js", - "files": [ - "bin/", - "lib/" - ], - "scripts": { - "test": "tap", - "snap": "tap", - "coverage": "tap", - "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "npmclilint": "npmcli-lint", - "lintfix": "npm run lint -- --fix", - "postsnap": "npm run lintfix --", - "postlint": "template-oss-check", - "posttest": "npm run lint", - "template-oss-apply": "template-oss-apply --force" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/npm/cacache.git" - }, - "keywords": [ - "cache", - "caching", - "content-addressable", - "sri", - "sri hash", - "subresource integrity", - "cache", - "storage", - "store", - "file store", - "filesystem", - "disk cache", - "disk storage" - ], - "license": "ISC", - "dependencies": { - "@npmcli/fs": "^3.1.0", - "fs-minipass": "^3.0.0", - "glob": "^10.2.2", - "lru-cache": "^10.0.1", - "minipass": "^7.0.3", - "minipass-collect": "^2.0.1", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "p-map": "^4.0.0", - "ssri": "^10.0.0", - "tar": "^6.1.11", - "unique-filename": "^3.0.0" - }, - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", - "tap": "^16.0.0" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "windowsCI": false, - "version": "4.22.0", - "publish": "true" - }, - "author": "GitHub Inc.", - "tap": { - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - } -} diff --git a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/LICENSE b/node_modules/@sigstore/sign/node_modules/make-fetch-happen/LICENSE deleted file mode 100644 index 1808eb2844231..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/LICENSE +++ /dev/null @@ -1,16 +0,0 @@ -ISC License - -Copyright 2017-2022 (c) npm, Inc. 
- -Permission to use, copy, modify, and/or distribute this software for -any purpose with or without fee is hereby granted, provided that the -above copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS -ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE -COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE -USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/entry.js deleted file mode 100644 index bfcfacbcc95e1..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/entry.js +++ /dev/null @@ -1,471 +0,0 @@ -const { Request, Response } = require('minipass-fetch') -const { Minipass } = require('minipass') -const MinipassFlush = require('minipass-flush') -const cacache = require('cacache') -const url = require('url') - -const CachingMinipassPipeline = require('../pipeline.js') -const CachePolicy = require('./policy.js') -const cacheKey = require('./key.js') -const remote = require('../remote.js') - -const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop) - -// allow list for request headers that will be written to the cache index -// note: we will also store any request headers -// that are named in a response's vary header -const KEEP_REQUEST_HEADERS = [ - 'accept-charset', - 'accept-encoding', - 'accept-language', - 'accept', - 'cache-control', -] - -// allow list for response headers that will be written to the cache index -// note: we must not store the real response's age header, or when we load -// a cache policy based on the metadata it will think the cached response -// is always stale -const KEEP_RESPONSE_HEADERS = [ - 'cache-control', - 'content-encoding', - 'content-language', - 'content-type', - 'date', - 'etag', - 'expires', - 'last-modified', - 'link', - 'location', - 'pragma', - 'vary', -] - -// return an object containing all metadata to be written to the index -const getMetadata = (request, response, options) => { - const metadata = { - time: Date.now(), - url: request.url, - reqHeaders: {}, - resHeaders: {}, - - // options on which we must match the request and vary the response - options: { - compress: options.compress != null ? 
options.compress : request.compress, - }, - } - - // only save the status if it's not a 200 or 304 - if (response.status !== 200 && response.status !== 304) { - metadata.status = response.status - } - - for (const name of KEEP_REQUEST_HEADERS) { - if (request.headers.has(name)) { - metadata.reqHeaders[name] = request.headers.get(name) - } - } - - // if the request's host header differs from the host in the url - // we need to keep it, otherwise it's just noise and we ignore it - const host = request.headers.get('host') - const parsedUrl = new url.URL(request.url) - if (host && parsedUrl.host !== host) { - metadata.reqHeaders.host = host - } - - // if the response has a vary header, make sure - // we store the relevant request headers too - if (response.headers.has('vary')) { - const vary = response.headers.get('vary') - // a vary of "*" means every header causes a different response. - // in that scenario, we do not include any additional headers - // as the freshness check will always fail anyway and we don't - // want to bloat the cache indexes - if (vary !== '*') { - // copy any other request headers that will vary the response - const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/) - for (const name of varyHeaders) { - if (request.headers.has(name)) { - metadata.reqHeaders[name] = request.headers.get(name) - } - } - } - } - - for (const name of KEEP_RESPONSE_HEADERS) { - if (response.headers.has(name)) { - metadata.resHeaders[name] = response.headers.get(name) - } - } - - for (const name of options.cacheAdditionalHeaders) { - if (response.headers.has(name)) { - metadata.resHeaders[name] = response.headers.get(name) - } - } - - return metadata -} - -// symbols used to hide objects that may be lazily evaluated in a getter -const _request = Symbol('request') -const _response = Symbol('response') -const _policy = Symbol('policy') - -class CacheEntry { - constructor ({ entry, request, response, options }) { - if (entry) { - this.key = entry.key - this.entry = entry - // previous versions of this module didn't write an explicit timestamp in - // the metadata, so fall back to the entry's timestamp. 
we can't use the - // entry timestamp to determine staleness because cacache will update it - // when it verifies its data - this.entry.metadata.time = this.entry.metadata.time || this.entry.time - } else { - this.key = cacheKey(request) - } - - this.options = options - - // these properties are behind getters that lazily evaluate - this[_request] = request - this[_response] = response - this[_policy] = null - } - - // returns a CacheEntry instance that satisfies the given request - // or undefined if no existing entry satisfies - static async find (request, options) { - try { - // compacts the index and returns an array of unique entries - var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => { - const entryA = new CacheEntry({ entry: A, options }) - const entryB = new CacheEntry({ entry: B, options }) - return entryA.policy.satisfies(entryB.request) - }, { - validateEntry: (entry) => { - // clean out entries with a buggy content-encoding value - if (entry.metadata && - entry.metadata.resHeaders && - entry.metadata.resHeaders['content-encoding'] === null) { - return false - } - - // if an integrity is null, it needs to have a status specified - if (entry.integrity === null) { - return !!(entry.metadata && entry.metadata.status) - } - - return true - }, - }) - } catch (err) { - // if the compact request fails, ignore the error and return - return - } - - // a cache mode of 'reload' means to behave as though we have no cache - // on the way to the network. return undefined to allow cacheFetch to - // create a brand new request no matter what. - if (options.cache === 'reload') { - return - } - - // find the specific entry that satisfies the request - let match - for (const entry of matches) { - const _entry = new CacheEntry({ - entry, - options, - }) - - if (_entry.policy.satisfies(request)) { - match = _entry - break - } - } - - return match - } - - // if the user made a PUT/POST/PATCH then we invalidate our - // cache for the same url by deleting the index entirely - static async invalidate (request, options) { - const key = cacheKey(request) - try { - await cacache.rm.entry(options.cachePath, key, { removeFully: true }) - } catch (err) { - // ignore errors - } - } - - get request () { - if (!this[_request]) { - this[_request] = new Request(this.entry.metadata.url, { - method: 'GET', - headers: this.entry.metadata.reqHeaders, - ...this.entry.metadata.options, - }) - } - - return this[_request] - } - - get response () { - if (!this[_response]) { - this[_response] = new Response(null, { - url: this.entry.metadata.url, - counter: this.options.counter, - status: this.entry.metadata.status || 200, - headers: { - ...this.entry.metadata.resHeaders, - 'content-length': this.entry.size, - }, - }) - } - - return this[_response] - } - - get policy () { - if (!this[_policy]) { - this[_policy] = new CachePolicy({ - entry: this.entry, - request: this.request, - response: this.response, - options: this.options, - }) - } - - return this[_policy] - } - - // wraps the response in a pipeline that stores the data - // in the cache while the user consumes it - async store (status) { - // if we got a status other than 200, 301, or 308, - // or the CachePolicy forbid storage, append the - // cache status header and return it untouched - if ( - this.request.method !== 'GET' || - ![200, 301, 308].includes(this.response.status) || - !this.policy.storable() - ) { - this.response.headers.set('x-local-cache-status', 'skip') - return this.response - } - - const size = 
this.response.headers.get('content-length') - const cacheOpts = { - algorithms: this.options.algorithms, - metadata: getMetadata(this.request, this.response, this.options), - size, - integrity: this.options.integrity, - integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body, - } - - let body = null - // we only set a body if the status is a 200, redirects are - // stored as metadata only - if (this.response.status === 200) { - let cacheWriteResolve, cacheWriteReject - const cacheWritePromise = new Promise((resolve, reject) => { - cacheWriteResolve = resolve - cacheWriteReject = reject - }).catch((err) => { - body.emit('error', err) - }) - - body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({ - flush () { - return cacheWritePromise - }, - })) - // this is always true since if we aren't reusing the one from the remote fetch, we - // are using the one from cacache - body.hasIntegrityEmitter = true - - const onResume = () => { - const tee = new Minipass() - const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts) - // re-emit the integrity and size events on our new response body so they can be reused - cacheStream.on('integrity', i => body.emit('integrity', i)) - cacheStream.on('size', s => body.emit('size', s)) - // stick a flag on here so downstream users will know if they can expect integrity events - tee.pipe(cacheStream) - // TODO if the cache write fails, log a warning but return the response anyway - // eslint-disable-next-line promise/catch-or-return - cacheStream.promise().then(cacheWriteResolve, cacheWriteReject) - body.unshift(tee) - body.unshift(this.response.body) - } - - body.once('resume', onResume) - body.once('end', () => body.removeListener('resume', onResume)) - } else { - await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts) - } - - // note: we do not set the x-local-cache-hash header because we do not know - // the hash value until after the write to the cache completes, which doesn't - // happen until after the response has been sent and it's too late to write - // the header anyway - this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) - this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) - this.response.headers.set('x-local-cache-mode', 'stream') - this.response.headers.set('x-local-cache-status', status) - this.response.headers.set('x-local-cache-time', new Date().toISOString()) - const newResponse = new Response(body, { - url: this.response.url, - status: this.response.status, - headers: this.response.headers, - counter: this.options.counter, - }) - return newResponse - } - - // use the cached data to create a response and return it - async respond (method, options, status) { - let response - if (method === 'HEAD' || [301, 308].includes(this.response.status)) { - // if the request is a HEAD, or the response is a redirect, - // then the metadata in the entry already includes everything - // we need to build a response - response = this.response - } else { - // we're responding with a full cached response, so create a body - // that reads from cacache and attach it to a new Response - const body = new Minipass() - const headers = { ...this.policy.responseHeaders() } - - const onResume = () => { - const cacheStream = cacache.get.stream.byDigest( - this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } - ) - cacheStream.on('error', async (err) => { - cacheStream.pause() - if 
(err.code === 'EINTEGRITY') { - await cacache.rm.content( - this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } - ) - } - if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') { - await CacheEntry.invalidate(this.request, this.options) - } - body.emit('error', err) - cacheStream.resume() - }) - // emit the integrity and size events based on our metadata so we're consistent - body.emit('integrity', this.entry.integrity) - body.emit('size', Number(headers['content-length'])) - cacheStream.pipe(body) - } - - body.once('resume', onResume) - body.once('end', () => body.removeListener('resume', onResume)) - response = new Response(body, { - url: this.entry.metadata.url, - counter: options.counter, - status: 200, - headers, - }) - } - - response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) - response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity)) - response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) - response.headers.set('x-local-cache-mode', 'stream') - response.headers.set('x-local-cache-status', status) - response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString()) - return response - } - - // use the provided request along with this cache entry to - // revalidate the stored response. returns a response, either - // from the cache or from the update - async revalidate (request, options) { - const revalidateRequest = new Request(request, { - headers: this.policy.revalidationHeaders(request), - }) - - try { - // NOTE: be sure to remove the headers property from the - // user supplied options, since we have already defined - // them on the new request object. if they're still in the - // options then those will overwrite the ones from the policy - var response = await remote(revalidateRequest, { - ...options, - headers: undefined, - }) - } catch (err) { - // if the network fetch fails, return the stale - // cached response unless it has a cache-control - // of 'must-revalidate' - if (!this.policy.mustRevalidate) { - return this.respond(request.method, options, 'stale') - } - - throw err - } - - if (this.policy.revalidated(revalidateRequest, response)) { - // we got a 304, write a new index to the cache and respond from cache - const metadata = getMetadata(request, response, options) - // 304 responses do not include headers that are specific to the response data - // since they do not include a body, so we copy values for headers that were - // in the old cache entry to the new one, if the new metadata does not already - // include that header - for (const name of KEEP_RESPONSE_HEADERS) { - if ( - !hasOwnProperty(metadata.resHeaders, name) && - hasOwnProperty(this.entry.metadata.resHeaders, name) - ) { - metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] - } - } - - for (const name of options.cacheAdditionalHeaders) { - const inMeta = hasOwnProperty(metadata.resHeaders, name) - const inEntry = hasOwnProperty(this.entry.metadata.resHeaders, name) - const inPolicy = hasOwnProperty(this.policy.response.headers, name) - - // if the header is in the existing entry, but it is not in the metadata - // then we need to write it to the metadata as this will refresh the on-disk cache - if (!inMeta && inEntry) { - metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] - } - // if the header is in the metadata, but not in the policy, then we need to set - // it in the policy so that it's included in the immediate response. 
future - // responses will load a new cache entry, so we don't need to change that - if (!inPolicy && inMeta) { - this.policy.response.headers[name] = metadata.resHeaders[name] - } - } - - try { - await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, { - size: this.entry.size, - metadata, - }) - } catch (err) { - // if updating the cache index fails, we ignore it and - // respond anyway - } - return this.respond(request.method, options, 'revalidated') - } - - // if we got a modified response, create a new entry based on it - const newEntry = new CacheEntry({ - request, - response, - options, - }) - - // respond with the new entry while writing it to the cache - return newEntry.store('updated') - } -} - -module.exports = CacheEntry diff --git a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/errors.js b/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/errors.js deleted file mode 100644 index 67a66573bebe6..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/errors.js +++ /dev/null @@ -1,11 +0,0 @@ -class NotCachedError extends Error { - constructor (url) { - /* eslint-disable-next-line max-len */ - super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`) - this.code = 'ENOTCACHED' - } -} - -module.exports = { - NotCachedError, -} diff --git a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/index.js b/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/index.js deleted file mode 100644 index 0de49d23fb933..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/index.js +++ /dev/null @@ -1,49 +0,0 @@ -const { NotCachedError } = require('./errors.js') -const CacheEntry = require('./entry.js') -const remote = require('../remote.js') - -// do whatever is necessary to get a Response and return it -const cacheFetch = async (request, options) => { - // try to find a cached entry that satisfies this request - const entry = await CacheEntry.find(request, options) - if (!entry) { - // no cached result, if the cache mode is 'only-if-cached' that's a failure - if (options.cache === 'only-if-cached') { - throw new NotCachedError(request.url) - } - - // otherwise, we make a request, store it and return it - const response = await remote(request, options) - const newEntry = new CacheEntry({ request, response, options }) - return newEntry.store('miss') - } - - // we have a cached response that satisfies this request, however if the cache - // mode is 'no-cache' then we send the revalidation request no matter what - if (options.cache === 'no-cache') { - return entry.revalidate(request, options) - } - - // if the cached entry is not stale, or if the cache mode is 'force-cache' or - // 'only-if-cached' we can respond with the cached entry. set the status - // based on the result of needsRevalidation and respond - const _needsRevalidation = entry.policy.needsRevalidation(request) - if (options.cache === 'force-cache' || - options.cache === 'only-if-cached' || - !_needsRevalidation) { - return entry.respond(request.method, options, _needsRevalidation ? 
'stale' : 'hit') - } - - // if we got here, the cache entry is stale so revalidate it - return entry.revalidate(request, options) -} - -cacheFetch.invalidate = async (request, options) => { - if (!options.cachePath) { - return - } - - return CacheEntry.invalidate(request, options) -} - -module.exports = cacheFetch diff --git a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/key.js b/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/key.js deleted file mode 100644 index f7684d562b7fa..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/key.js +++ /dev/null @@ -1,17 +0,0 @@ -const { URL, format } = require('url') - -// options passed to url.format() when generating a key -const formatOptions = { - auth: false, - fragment: false, - search: true, - unicode: false, -} - -// returns a string to be used as the cache key for the Request -const cacheKey = (request) => { - const parsed = new URL(request.url) - return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}` -} - -module.exports = cacheKey diff --git a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/policy.js deleted file mode 100644 index ada3c8600dae9..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/policy.js +++ /dev/null @@ -1,161 +0,0 @@ -const CacheSemantics = require('http-cache-semantics') -const Negotiator = require('negotiator') -const ssri = require('ssri') - -// options passed to http-cache-semantics constructor -const policyOptions = { - shared: false, - ignoreCargoCult: true, -} - -// a fake empty response, used when only testing the -// request for storability -const emptyResponse = { status: 200, headers: {} } - -// returns a plain object representation of the Request -const requestObject = (request) => { - const _obj = { - method: request.method, - url: request.url, - headers: {}, - compress: request.compress, - } - - request.headers.forEach((value, key) => { - _obj.headers[key] = value - }) - - return _obj -} - -// returns a plain object representation of the Response -const responseObject = (response) => { - const _obj = { - status: response.status, - headers: {}, - } - - response.headers.forEach((value, key) => { - _obj.headers[key] = value - }) - - return _obj -} - -class CachePolicy { - constructor ({ entry, request, response, options }) { - this.entry = entry - this.request = requestObject(request) - this.response = responseObject(response) - this.options = options - this.policy = new CacheSemantics(this.request, this.response, policyOptions) - - if (this.entry) { - // if we have an entry, copy the timestamp to the _responseTime - // this is necessary because the CacheSemantics constructor forces - // the value to Date.now() which means a policy created from a - // cache entry is likely to always identify itself as stale - this.policy._responseTime = this.entry.metadata.time - } - } - - // static method to quickly determine if a request alone is storable - static storable (request, options) { - // no cachePath means no caching - if (!options.cachePath) { - return false - } - - // user explicitly asked not to cache - if (options.cache === 'no-store') { - return false - } - - // we only cache GET and HEAD requests - if (!['GET', 'HEAD'].includes(request.method)) { - return false - } - - // otherwise, let http-cache-semantics make the decision - // based on the request's headers - const 
policy = new CacheSemantics(requestObject(request), emptyResponse, policyOptions) - return policy.storable() - } - - // returns true if the policy satisfies the request - satisfies (request) { - const _req = requestObject(request) - if (this.request.headers.host !== _req.headers.host) { - return false - } - - if (this.request.compress !== _req.compress) { - return false - } - - const negotiatorA = new Negotiator(this.request) - const negotiatorB = new Negotiator(_req) - - if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) { - return false - } - - if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) { - return false - } - - if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) { - return false - } - - if (this.options.integrity) { - return ssri.parse(this.options.integrity).match(this.entry.integrity) - } - - return true - } - - // returns true if the request and response allow caching - storable () { - return this.policy.storable() - } - - // NOTE: this is a hack to avoid parsing the cache-control - // header ourselves, it returns true if the response's - // cache-control contains must-revalidate - get mustRevalidate () { - return !!this.policy._rescc['must-revalidate'] - } - - // returns true if the cached response requires revalidation - // for the given request - needsRevalidation (request) { - const _req = requestObject(request) - // force method to GET because we only cache GETs - // but can serve a HEAD from a cached GET - _req.method = 'GET' - return !this.policy.satisfiesWithoutRevalidation(_req) - } - - responseHeaders () { - return this.policy.responseHeaders() - } - - // returns a new object containing the appropriate headers - // to send a revalidation request - revalidationHeaders (request) { - const _req = requestObject(request) - return this.policy.revalidationHeaders(_req) - } - - // returns true if the request/response was revalidated - // successfully. returns false if a new response was received - revalidated (request, response) { - const _req = requestObject(request) - const _res = responseObject(response) - const policy = this.policy.revalidatedPolicy(_req, _res) - return !policy.modified - } -} - -module.exports = CachePolicy diff --git a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/fetch.js b/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/fetch.js deleted file mode 100644 index 233ba67e16550..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/fetch.js +++ /dev/null @@ -1,118 +0,0 @@ -'use strict' - -const { FetchError, Request, isRedirect } = require('minipass-fetch') -const url = require('url') - -const CachePolicy = require('./cache/policy.js') -const cache = require('./cache/index.js') -const remote = require('./remote.js') - -// given a Request, a Response and user options -// return true if the response is a redirect that -// can be followed. 
we throw errors that will result -// in the fetch being rejected if the redirect is -// possible but invalid for some reason -const canFollowRedirect = (request, response, options) => { - if (!isRedirect(response.status)) { - return false - } - - if (options.redirect === 'manual') { - return false - } - - if (options.redirect === 'error') { - throw new FetchError(`redirect mode is set to error: ${request.url}`, - 'no-redirect', { code: 'ENOREDIRECT' }) - } - - if (!response.headers.has('location')) { - throw new FetchError(`redirect location header missing for: ${request.url}`, - 'no-location', { code: 'EINVALIDREDIRECT' }) - } - - if (request.counter >= request.follow) { - throw new FetchError(`maximum redirect reached at: ${request.url}`, - 'max-redirect', { code: 'EMAXREDIRECT' }) - } - - return true -} - -// given a Request, a Response, and the user's options return an object -// with a new Request and a new options object that will be used for -// following the redirect -const getRedirect = (request, response, options) => { - const _opts = { ...options } - const location = response.headers.get('location') - const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url) - // Comment below is used under the following license: - /** - * @license - * Copyright (c) 2010-2012 Mikeal Rogers - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an "AS - * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language - * governing permissions and limitations under the License. - */ - - // Remove authorization if changing hostnames (but not if just - // changing ports or protocols). This matches the behavior of request: - // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138 - if (new url.URL(request.url).hostname !== redirectUrl.hostname) { - request.headers.delete('authorization') - request.headers.delete('cookie') - } - - // for POST request with 301/302 response, or any request with 303 response, - // use GET when following redirect - if ( - response.status === 303 || - (request.method === 'POST' && [301, 302].includes(response.status)) - ) { - _opts.method = 'GET' - _opts.body = null - request.headers.delete('content-length') - } - - _opts.headers = {} - request.headers.forEach((value, key) => { - _opts.headers[key] = value - }) - - _opts.counter = ++request.counter - const redirectReq = new Request(url.format(redirectUrl), _opts) - return { - request: redirectReq, - options: _opts, - } -} - -const fetch = async (request, options) => { - const response = CachePolicy.storable(request, options) - ? 
await cache(request, options) - : await remote(request, options) - - // if the request wasn't a GET or HEAD, and the response - // status is between 200 and 399 inclusive, invalidate the - // request url - if (!['GET', 'HEAD'].includes(request.method) && - response.status >= 200 && - response.status <= 399) { - await cache.invalidate(request, options) - } - - if (!canFollowRedirect(request, response, options)) { - return response - } - - const redirect = getRedirect(request, response, options) - return fetch(redirect.request, redirect.options) -} - -module.exports = fetch diff --git a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/index.js b/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/index.js deleted file mode 100644 index 2f12e8e1b6113..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/index.js +++ /dev/null @@ -1,41 +0,0 @@ -const { FetchError, Headers, Request, Response } = require('minipass-fetch') - -const configureOptions = require('./options.js') -const fetch = require('./fetch.js') - -const makeFetchHappen = (url, opts) => { - const options = configureOptions(opts) - - const request = new Request(url, options) - return fetch(request, options) -} - -makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => { - if (typeof defaultUrl === 'object') { - defaultOptions = defaultUrl - defaultUrl = null - } - - const defaultedFetch = (url, options = {}) => { - const finalUrl = url || defaultUrl - const finalOptions = { - ...defaultOptions, - ...options, - headers: { - ...defaultOptions.headers, - ...options.headers, - }, - } - return wrappedFetch(finalUrl, finalOptions) - } - - defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) => - makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch) - return defaultedFetch -} - -module.exports = makeFetchHappen -module.exports.FetchError = FetchError -module.exports.Headers = Headers -module.exports.Request = Request -module.exports.Response = Response diff --git a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/options.js b/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/options.js deleted file mode 100644 index f77511279f831..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/options.js +++ /dev/null @@ -1,54 +0,0 @@ -const dns = require('dns') - -const conditionalHeaders = [ - 'if-modified-since', - 'if-none-match', - 'if-unmodified-since', - 'if-match', - 'if-range', -] - -const configureOptions = (opts) => { - const { strictSSL, ...options } = { ...opts } - options.method = options.method ? 
options.method.toUpperCase() : 'GET' - options.rejectUnauthorized = strictSSL !== false - - if (!options.retry) { - options.retry = { retries: 0 } - } else if (typeof options.retry === 'string') { - const retries = parseInt(options.retry, 10) - if (isFinite(retries)) { - options.retry = { retries } - } else { - options.retry = { retries: 0 } - } - } else if (typeof options.retry === 'number') { - options.retry = { retries: options.retry } - } else { - options.retry = { retries: 0, ...options.retry } - } - - options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns } - - options.cache = options.cache || 'default' - if (options.cache === 'default') { - const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => { - return conditionalHeaders.includes(name.toLowerCase()) - }) - if (hasConditionalHeader) { - options.cache = 'no-store' - } - } - - options.cacheAdditionalHeaders = options.cacheAdditionalHeaders || [] - - // cacheManager is deprecated, but if it's set and - // cachePath is not we should copy it to the new field - if (options.cacheManager && !options.cachePath) { - options.cachePath = options.cacheManager - } - - return options -} - -module.exports = configureOptions diff --git a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/pipeline.js b/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/pipeline.js deleted file mode 100644 index b1d221b2d0ce3..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/pipeline.js +++ /dev/null @@ -1,41 +0,0 @@ -'use strict' - -const MinipassPipeline = require('minipass-pipeline') - -class CachingMinipassPipeline extends MinipassPipeline { - #events = [] - #data = new Map() - - constructor (opts, ...streams) { - // CRITICAL: do NOT pass the streams to the call to super(), this will start - // the flow of data and potentially cause the events we need to catch to emit - // before we've finished our own setup. 
instead we call super() with no args, - // finish our setup, and then push the streams into ourselves to start the - // data flow - super() - this.#events = opts.events - - /* istanbul ignore next - coverage disabled because this is pointless to test here */ - if (streams.length) { - this.push(...streams) - } - } - - on (event, handler) { - if (this.#events.includes(event) && this.#data.has(event)) { - return handler(...this.#data.get(event)) - } - - return super.on(event, handler) - } - - emit (event, ...data) { - if (this.#events.includes(event)) { - this.#data.set(event, data) - } - - return super.emit(event, ...data) - } -} - -module.exports = CachingMinipassPipeline diff --git a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/remote.js b/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/remote.js deleted file mode 100644 index 8554564074de6..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/remote.js +++ /dev/null @@ -1,131 +0,0 @@ -const { Minipass } = require('minipass') -const fetch = require('minipass-fetch') -const promiseRetry = require('promise-retry') -const ssri = require('ssri') -const { log } = require('proc-log') - -const CachingMinipassPipeline = require('./pipeline.js') -const { getAgent } = require('@npmcli/agent') -const pkg = require('../package.json') - -const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})` - -const RETRY_ERRORS = [ - 'ECONNRESET', // remote socket closed on us - 'ECONNREFUSED', // remote host refused to open connection - 'EADDRINUSE', // failed to bind to a local port (proxy?) - 'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW - // from @npmcli/agent - 'ECONNECTIONTIMEOUT', - 'EIDLETIMEOUT', - 'ERESPONSETIMEOUT', - 'ETRANSFERTIMEOUT', - // Known codes we do NOT retry on: - // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline) - // EINVALIDPROXY // invalid protocol from @npmcli/agent - // EINVALIDRESPONSE // invalid status code from @npmcli/agent -] - -const RETRY_TYPES = [ - 'request-timeout', -] - -// make a request directly to the remote source, -// retrying certain classes of errors as well as -// following redirects (through the cache if necessary) -// and verifying response integrity -const remoteFetch = (request, options) => { - const agent = getAgent(request.url, options) - if (!request.headers.has('connection')) { - request.headers.set('connection', agent ? 
'keep-alive' : 'close') - } - - if (!request.headers.has('user-agent')) { - request.headers.set('user-agent', USER_AGENT) - } - - // keep our own options since we're overriding the agent - // and the redirect mode - const _opts = { - ...options, - agent, - redirect: 'manual', - } - - return promiseRetry(async (retryHandler, attemptNum) => { - const req = new fetch.Request(request, _opts) - try { - let res = await fetch(req, _opts) - if (_opts.integrity && res.status === 200) { - // we got a 200 response and the user has specified an expected - // integrity value, so wrap the response in an ssri stream to verify it - const integrityStream = ssri.integrityStream({ - algorithms: _opts.algorithms, - integrity: _opts.integrity, - size: _opts.size, - }) - const pipeline = new CachingMinipassPipeline({ - events: ['integrity', 'size'], - }, res.body, integrityStream) - // we also propagate the integrity and size events out to the pipeline so we can use - // this new response body as an integrityEmitter for cacache - integrityStream.on('integrity', i => pipeline.emit('integrity', i)) - integrityStream.on('size', s => pipeline.emit('size', s)) - res = new fetch.Response(pipeline, res) - // set an explicit flag so we know if our response body will emit integrity and size - res.body.hasIntegrityEmitter = true - } - - res.headers.set('x-fetch-attempts', attemptNum) - - // do not retry POST requests, or requests with a streaming body - // do retry requests with a 408, 420, 429 or 500+ status in the response - const isStream = Minipass.isStream(req.body) - const isRetriable = req.method !== 'POST' && - !isStream && - ([408, 420, 429].includes(res.status) || res.status >= 500) - - if (isRetriable) { - if (typeof options.onRetry === 'function') { - options.onRetry(res) - } - - /* eslint-disable-next-line max-len */ - log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${res.status}`) - return retryHandler(res) - } - - return res - } catch (err) { - const code = (err.code === 'EPROMISERETRY') - ? 
err.retried.code - : err.code - - // err.retried will be the thing that was thrown from above - // if it's a response, we just got a bad status code and we - // can re-throw to allow the retry - const isRetryError = err.retried instanceof fetch.Response || - (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type)) - - if (req.method === 'POST' || isRetryError) { - throw err - } - - if (typeof options.onRetry === 'function') { - options.onRetry(err) - } - - log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${err.code}`) - return retryHandler(err) - } - }, options.retry).catch((err) => { - // don't reject for http errors, just return them - if (err.status >= 400 && err.type !== 'system') { - return err - } - - throw err - }) -} - -module.exports = remoteFetch diff --git a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/package.json b/node_modules/@sigstore/sign/node_modules/make-fetch-happen/package.json deleted file mode 100644 index 7adb4d1e7f971..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/package.json +++ /dev/null @@ -1,75 +0,0 @@ -{ - "name": "make-fetch-happen", - "version": "13.0.1", - "description": "Opinionated, caching, retrying fetch client", - "main": "lib/index.js", - "files": [ - "bin/", - "lib/" - ], - "scripts": { - "test": "tap", - "posttest": "npm run lint", - "eslint": "eslint", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "lintfix": "npm run lint -- --fix", - "postlint": "template-oss-check", - "snap": "tap", - "template-oss-apply": "template-oss-apply --force" - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/make-fetch-happen.git" - }, - "keywords": [ - "http", - "request", - "fetch", - "mean girls", - "caching", - "cache", - "subresource integrity" - ], - "author": "GitHub Inc.", - "license": "ISC", - "dependencies": { - "@npmcli/agent": "^2.0.0", - "cacache": "^18.0.0", - "http-cache-semantics": "^4.1.1", - "is-lambda": "^1.0.1", - "minipass": "^7.0.2", - "minipass-fetch": "^3.0.0", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "negotiator": "^0.6.3", - "proc-log": "^4.2.0", - "promise-retry": "^2.0.1", - "ssri": "^10.0.0" - }, - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.21.4", - "nock": "^13.2.4", - "safe-buffer": "^5.2.1", - "standard-version": "^9.3.2", - "tap": "^16.0.0" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - }, - "tap": { - "color": 1, - "files": "test/*.js", - "check-coverage": true, - "timeout": 60, - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.21.4", - "publish": "true" - } -} diff --git a/node_modules/@sigstore/sign/node_modules/minipass-fetch/LICENSE b/node_modules/@sigstore/sign/node_modules/minipass-fetch/LICENSE deleted file mode 100644 index 3c3410cdc12ee..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/minipass-fetch/LICENSE +++ /dev/null @@ -1,28 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Isaac Z. 
Schlueter and Contributors -Copyright (c) 2016 David Frank - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - ---- - -Note: This is a derivative work based on "node-fetch" by David Frank, -modified and distributed under the terms of the MIT license above. -https://github.com/bitinn/node-fetch diff --git a/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/abort-error.js b/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/abort-error.js deleted file mode 100644 index b18f643269e37..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/abort-error.js +++ /dev/null @@ -1,17 +0,0 @@ -'use strict' -class AbortError extends Error { - constructor (message) { - super(message) - this.code = 'FETCH_ABORTED' - this.type = 'aborted' - Error.captureStackTrace(this, this.constructor) - } - - get name () { - return 'AbortError' - } - - // don't allow name to be overridden, but don't throw either - set name (s) {} -} -module.exports = AbortError diff --git a/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/blob.js b/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/blob.js deleted file mode 100644 index 121b1730102e7..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/blob.js +++ /dev/null @@ -1,97 +0,0 @@ -'use strict' -const { Minipass } = require('minipass') -const TYPE = Symbol('type') -const BUFFER = Symbol('buffer') - -class Blob { - constructor (blobParts, options) { - this[TYPE] = '' - - const buffers = [] - let size = 0 - - if (blobParts) { - const a = blobParts - const length = Number(a.length) - for (let i = 0; i < length; i++) { - const element = a[i] - const buffer = element instanceof Buffer ? element - : ArrayBuffer.isView(element) - ? Buffer.from(element.buffer, element.byteOffset, element.byteLength) - : element instanceof ArrayBuffer ? Buffer.from(element) - : element instanceof Blob ? element[BUFFER] - : typeof element === 'string' ? 
Buffer.from(element) - : Buffer.from(String(element)) - size += buffer.length - buffers.push(buffer) - } - } - - this[BUFFER] = Buffer.concat(buffers, size) - - const type = options && options.type !== undefined - && String(options.type).toLowerCase() - if (type && !/[^\u0020-\u007E]/.test(type)) { - this[TYPE] = type - } - } - - get size () { - return this[BUFFER].length - } - - get type () { - return this[TYPE] - } - - text () { - return Promise.resolve(this[BUFFER].toString()) - } - - arrayBuffer () { - const buf = this[BUFFER] - const off = buf.byteOffset - const len = buf.byteLength - const ab = buf.buffer.slice(off, off + len) - return Promise.resolve(ab) - } - - stream () { - return new Minipass().end(this[BUFFER]) - } - - slice (start, end, type) { - const size = this.size - const relativeStart = start === undefined ? 0 - : start < 0 ? Math.max(size + start, 0) - : Math.min(start, size) - const relativeEnd = end === undefined ? size - : end < 0 ? Math.max(size + end, 0) - : Math.min(end, size) - const span = Math.max(relativeEnd - relativeStart, 0) - - const buffer = this[BUFFER] - const slicedBuffer = buffer.slice( - relativeStart, - relativeStart + span - ) - const blob = new Blob([], { type }) - blob[BUFFER] = slicedBuffer - return blob - } - - get [Symbol.toStringTag] () { - return 'Blob' - } - - static get BUFFER () { - return BUFFER - } -} - -Object.defineProperties(Blob.prototype, { - size: { enumerable: true }, - type: { enumerable: true }, -}) - -module.exports = Blob diff --git a/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/body.js b/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/body.js deleted file mode 100644 index 62286bd1de0d9..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/body.js +++ /dev/null @@ -1,350 +0,0 @@ -'use strict' -const { Minipass } = require('minipass') -const MinipassSized = require('minipass-sized') - -const Blob = require('./blob.js') -const { BUFFER } = Blob -const FetchError = require('./fetch-error.js') - -// optional dependency on 'encoding' -let convert -try { - convert = require('encoding').convert -} catch (e) { - // defer error until textConverted is called -} - -const INTERNALS = Symbol('Body internals') -const CONSUME_BODY = Symbol('consumeBody') - -class Body { - constructor (bodyArg, options = {}) { - const { size = 0, timeout = 0 } = options - const body = bodyArg === undefined || bodyArg === null ? null - : isURLSearchParams(bodyArg) ? Buffer.from(bodyArg.toString()) - : isBlob(bodyArg) ? bodyArg - : Buffer.isBuffer(bodyArg) ? bodyArg - : Object.prototype.toString.call(bodyArg) === '[object ArrayBuffer]' - ? Buffer.from(bodyArg) - : ArrayBuffer.isView(bodyArg) - ? Buffer.from(bodyArg.buffer, bodyArg.byteOffset, bodyArg.byteLength) - : Minipass.isStream(bodyArg) ? bodyArg - : Buffer.from(String(bodyArg)) - - this[INTERNALS] = { - body, - disturbed: false, - error: null, - } - - this.size = size - this.timeout = timeout - - if (Minipass.isStream(body)) { - body.on('error', er => { - const error = er.name === 'AbortError' ? 
er - : new FetchError(`Invalid response while trying to fetch ${ - this.url}: ${er.message}`, 'system', er) - this[INTERNALS].error = error - }) - } - } - - get body () { - return this[INTERNALS].body - } - - get bodyUsed () { - return this[INTERNALS].disturbed - } - - arrayBuffer () { - return this[CONSUME_BODY]().then(buf => - buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength)) - } - - blob () { - const ct = this.headers && this.headers.get('content-type') || '' - return this[CONSUME_BODY]().then(buf => Object.assign( - new Blob([], { type: ct.toLowerCase() }), - { [BUFFER]: buf } - )) - } - - async json () { - const buf = await this[CONSUME_BODY]() - try { - return JSON.parse(buf.toString()) - } catch (er) { - throw new FetchError( - `invalid json response body at ${this.url} reason: ${er.message}`, - 'invalid-json' - ) - } - } - - text () { - return this[CONSUME_BODY]().then(buf => buf.toString()) - } - - buffer () { - return this[CONSUME_BODY]() - } - - textConverted () { - return this[CONSUME_BODY]().then(buf => convertBody(buf, this.headers)) - } - - [CONSUME_BODY] () { - if (this[INTERNALS].disturbed) { - return Promise.reject(new TypeError(`body used already for: ${ - this.url}`)) - } - - this[INTERNALS].disturbed = true - - if (this[INTERNALS].error) { - return Promise.reject(this[INTERNALS].error) - } - - // body is null - if (this.body === null) { - return Promise.resolve(Buffer.alloc(0)) - } - - if (Buffer.isBuffer(this.body)) { - return Promise.resolve(this.body) - } - - const upstream = isBlob(this.body) ? this.body.stream() : this.body - - /* istanbul ignore if: should never happen */ - if (!Minipass.isStream(upstream)) { - return Promise.resolve(Buffer.alloc(0)) - } - - const stream = this.size && upstream instanceof MinipassSized ? upstream - : !this.size && upstream instanceof Minipass && - !(upstream instanceof MinipassSized) ? upstream - : this.size ? new MinipassSized({ size: this.size }) - : new Minipass() - - // allow timeout on slow response body, but only if the stream is still writable. this - // makes the timeout center on the socket stream from lib/index.js rather than the - // intermediary minipass stream we create to receive the data - const resTimeout = this.timeout && stream.writable ? setTimeout(() => { - stream.emit('error', new FetchError( - `Response timeout while trying to fetch ${ - this.url} (over ${this.timeout}ms)`, 'body-timeout')) - }, this.timeout) : null - - // do not keep the process open just for this timeout, even - // though we expect it'll get cleared eventually. - if (resTimeout && resTimeout.unref) { - resTimeout.unref() - } - - // do the pipe in the promise, because the pipe() can send too much - // data through right away and upset the MP Sized object - return new Promise((resolve) => { - // if the stream is some other kind of stream, then pipe through a MP - // so we can collect it more easily. 
- if (stream !== upstream) { - upstream.on('error', er => stream.emit('error', er)) - upstream.pipe(stream) - } - resolve() - }).then(() => stream.concat()).then(buf => { - clearTimeout(resTimeout) - return buf - }).catch(er => { - clearTimeout(resTimeout) - // request was aborted, reject with this Error - if (er.name === 'AbortError' || er.name === 'FetchError') { - throw er - } else if (er.name === 'RangeError') { - throw new FetchError(`Could not create Buffer from response body for ${ - this.url}: ${er.message}`, 'system', er) - } else { - // other errors, such as incorrect content-encoding or content-length - throw new FetchError(`Invalid response body while trying to fetch ${ - this.url}: ${er.message}`, 'system', er) - } - }) - } - - static clone (instance) { - if (instance.bodyUsed) { - throw new Error('cannot clone body after it is used') - } - - const body = instance.body - - // check that body is a stream and not form-data object - // NB: can't clone the form-data object without having it as a dependency - if (Minipass.isStream(body) && typeof body.getBoundary !== 'function') { - // create a dedicated tee stream so that we don't lose data - // potentially sitting in the body stream's buffer by writing it - // immediately to p1 and not having it for p2. - const tee = new Minipass() - const p1 = new Minipass() - const p2 = new Minipass() - tee.on('error', er => { - p1.emit('error', er) - p2.emit('error', er) - }) - body.on('error', er => tee.emit('error', er)) - tee.pipe(p1) - tee.pipe(p2) - body.pipe(tee) - // set instance body to one fork, return the other - instance[INTERNALS].body = p1 - return p2 - } else { - return instance.body - } - } - - static extractContentType (body) { - return body === null || body === undefined ? null - : typeof body === 'string' ? 'text/plain;charset=UTF-8' - : isURLSearchParams(body) - ? 'application/x-www-form-urlencoded;charset=UTF-8' - : isBlob(body) ? body.type || null - : Buffer.isBuffer(body) ? null - : Object.prototype.toString.call(body) === '[object ArrayBuffer]' ? null - : ArrayBuffer.isView(body) ? null - : typeof body.getBoundary === 'function' - ? `multipart/form-data;boundary=${body.getBoundary()}` - : Minipass.isStream(body) ? null - : 'text/plain;charset=UTF-8' - } - - static getTotalBytes (instance) { - const { body } = instance - return (body === null || body === undefined) ? 0 - : isBlob(body) ? body.size - : Buffer.isBuffer(body) ? body.length - : body && typeof body.getLengthSync === 'function' && ( - // detect form data input from form-data module - body._lengthRetrievers && - /* istanbul ignore next */ body._lengthRetrievers.length === 0 || // 1.x - body.hasKnownLength && body.hasKnownLength()) // 2.x - ? body.getLengthSync() - : null - } - - static writeToStream (dest, instance) { - const { body } = instance - - if (body === null || body === undefined) { - dest.end() - } else if (Buffer.isBuffer(body) || typeof body === 'string') { - dest.end(body) - } else { - // body is stream or blob - const stream = isBlob(body) ? body.stream() : body - stream.on('error', er => dest.emit('error', er)).pipe(dest) - } - - return dest - } -} - -Object.defineProperties(Body.prototype, { - body: { enumerable: true }, - bodyUsed: { enumerable: true }, - arrayBuffer: { enumerable: true }, - blob: { enumerable: true }, - json: { enumerable: true }, - text: { enumerable: true }, -}) - -const isURLSearchParams = obj => - // Duck-typing as a necessary condition. 
- (typeof obj !== 'object' || - typeof obj.append !== 'function' || - typeof obj.delete !== 'function' || - typeof obj.get !== 'function' || - typeof obj.getAll !== 'function' || - typeof obj.has !== 'function' || - typeof obj.set !== 'function') ? false - // Brand-checking and more duck-typing as optional condition. - : obj.constructor.name === 'URLSearchParams' || - Object.prototype.toString.call(obj) === '[object URLSearchParams]' || - typeof obj.sort === 'function' - -const isBlob = obj => - typeof obj === 'object' && - typeof obj.arrayBuffer === 'function' && - typeof obj.type === 'string' && - typeof obj.stream === 'function' && - typeof obj.constructor === 'function' && - typeof obj.constructor.name === 'string' && - /^(Blob|File)$/.test(obj.constructor.name) && - /^(Blob|File)$/.test(obj[Symbol.toStringTag]) - -const convertBody = (buffer, headers) => { - /* istanbul ignore if */ - if (typeof convert !== 'function') { - throw new Error('The package `encoding` must be installed to use the textConverted() function') - } - - const ct = headers && headers.get('content-type') - let charset = 'utf-8' - let res - - // header - if (ct) { - res = /charset=([^;]*)/i.exec(ct) - } - - // no charset in content type, peek at response body for at most 1024 bytes - const str = buffer.slice(0, 1024).toString() - - // html5 - if (!res && str) { - res = / this.expect - ? 'max-size' : type - this.message = message - Error.captureStackTrace(this, this.constructor) - } - - get name () { - return 'FetchError' - } - - // don't allow name to be overwritten - set name (n) {} - - get [Symbol.toStringTag] () { - return 'FetchError' - } -} -module.exports = FetchError diff --git a/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/headers.js b/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/headers.js deleted file mode 100644 index dd6e854d5ba39..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/headers.js +++ /dev/null @@ -1,267 +0,0 @@ -'use strict' -const invalidTokenRegex = /[^^_`a-zA-Z\-0-9!#$%&'*+.|~]/ -const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/ - -const validateName = name => { - name = `${name}` - if (invalidTokenRegex.test(name) || name === '') { - throw new TypeError(`${name} is not a legal HTTP header name`) - } -} - -const validateValue = value => { - value = `${value}` - if (invalidHeaderCharRegex.test(value)) { - throw new TypeError(`${value} is not a legal HTTP header value`) - } -} - -const find = (map, name) => { - name = name.toLowerCase() - for (const key in map) { - if (key.toLowerCase() === name) { - return key - } - } - return undefined -} - -const MAP = Symbol('map') -class Headers { - constructor (init = undefined) { - this[MAP] = Object.create(null) - if (init instanceof Headers) { - const rawHeaders = init.raw() - const headerNames = Object.keys(rawHeaders) - for (const headerName of headerNames) { - for (const value of rawHeaders[headerName]) { - this.append(headerName, value) - } - } - return - } - - // no-op - if (init === undefined || init === null) { - return - } - - if (typeof init === 'object') { - const method = init[Symbol.iterator] - if (method !== null && method !== undefined) { - if (typeof method !== 'function') { - throw new TypeError('Header pairs must be iterable') - } - - // sequence> - // Note: per spec we have to first exhaust the lists then process them - const pairs = [] - for (const pair of init) { - if (typeof pair !== 'object' || - typeof pair[Symbol.iterator] !== 'function') { - throw new 
TypeError('Each header pair must be iterable') - } - const arrPair = Array.from(pair) - if (arrPair.length !== 2) { - throw new TypeError('Each header pair must be a name/value tuple') - } - pairs.push(arrPair) - } - - for (const pair of pairs) { - this.append(pair[0], pair[1]) - } - } else { - // record - for (const key of Object.keys(init)) { - this.append(key, init[key]) - } - } - } else { - throw new TypeError('Provided initializer must be an object') - } - } - - get (name) { - name = `${name}` - validateName(name) - const key = find(this[MAP], name) - if (key === undefined) { - return null - } - - return this[MAP][key].join(', ') - } - - forEach (callback, thisArg = undefined) { - let pairs = getHeaders(this) - for (let i = 0; i < pairs.length; i++) { - const [name, value] = pairs[i] - callback.call(thisArg, value, name, this) - // refresh in case the callback added more headers - pairs = getHeaders(this) - } - } - - set (name, value) { - name = `${name}` - value = `${value}` - validateName(name) - validateValue(value) - const key = find(this[MAP], name) - this[MAP][key !== undefined ? key : name] = [value] - } - - append (name, value) { - name = `${name}` - value = `${value}` - validateName(name) - validateValue(value) - const key = find(this[MAP], name) - if (key !== undefined) { - this[MAP][key].push(value) - } else { - this[MAP][name] = [value] - } - } - - has (name) { - name = `${name}` - validateName(name) - return find(this[MAP], name) !== undefined - } - - delete (name) { - name = `${name}` - validateName(name) - const key = find(this[MAP], name) - if (key !== undefined) { - delete this[MAP][key] - } - } - - raw () { - return this[MAP] - } - - keys () { - return new HeadersIterator(this, 'key') - } - - values () { - return new HeadersIterator(this, 'value') - } - - [Symbol.iterator] () { - return new HeadersIterator(this, 'key+value') - } - - entries () { - return new HeadersIterator(this, 'key+value') - } - - get [Symbol.toStringTag] () { - return 'Headers' - } - - static exportNodeCompatibleHeaders (headers) { - const obj = Object.assign(Object.create(null), headers[MAP]) - - // http.request() only supports string as Host header. This hack makes - // specifying custom Host header possible. - const hostHeaderKey = find(headers[MAP], 'Host') - if (hostHeaderKey !== undefined) { - obj[hostHeaderKey] = obj[hostHeaderKey][0] - } - - return obj - } - - static createHeadersLenient (obj) { - const headers = new Headers() - for (const name of Object.keys(obj)) { - if (invalidTokenRegex.test(name)) { - continue - } - - if (Array.isArray(obj[name])) { - for (const val of obj[name]) { - if (invalidHeaderCharRegex.test(val)) { - continue - } - - if (headers[MAP][name] === undefined) { - headers[MAP][name] = [val] - } else { - headers[MAP][name].push(val) - } - } - } else if (!invalidHeaderCharRegex.test(obj[name])) { - headers[MAP][name] = [obj[name]] - } - } - return headers - } -} - -Object.defineProperties(Headers.prototype, { - get: { enumerable: true }, - forEach: { enumerable: true }, - set: { enumerable: true }, - append: { enumerable: true }, - has: { enumerable: true }, - delete: { enumerable: true }, - keys: { enumerable: true }, - values: { enumerable: true }, - entries: { enumerable: true }, -}) - -const getHeaders = (headers, kind = 'key+value') => - Object.keys(headers[MAP]).sort().map( - kind === 'key' ? k => k.toLowerCase() - : kind === 'value' ? 
k => headers[MAP][k].join(', ') - : k => [k.toLowerCase(), headers[MAP][k].join(', ')] - ) - -const INTERNAL = Symbol('internal') - -class HeadersIterator { - constructor (target, kind) { - this[INTERNAL] = { - target, - kind, - index: 0, - } - } - - get [Symbol.toStringTag] () { - return 'HeadersIterator' - } - - next () { - /* istanbul ignore if: should be impossible */ - if (!this || Object.getPrototypeOf(this) !== HeadersIterator.prototype) { - throw new TypeError('Value of `this` is not a HeadersIterator') - } - - const { target, kind, index } = this[INTERNAL] - const values = getHeaders(target, kind) - const len = values.length - if (index >= len) { - return { - value: undefined, - done: true, - } - } - - this[INTERNAL].index++ - - return { value: values[index], done: false } - } -} - -// manually extend because 'extends' requires a ctor -Object.setPrototypeOf(HeadersIterator.prototype, - Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))) - -module.exports = Headers diff --git a/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/index.js b/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/index.js deleted file mode 100644 index da402161670e6..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/index.js +++ /dev/null @@ -1,377 +0,0 @@ -'use strict' -const { URL } = require('url') -const http = require('http') -const https = require('https') -const zlib = require('minizlib') -const { Minipass } = require('minipass') - -const Body = require('./body.js') -const { writeToStream, getTotalBytes } = Body -const Response = require('./response.js') -const Headers = require('./headers.js') -const { createHeadersLenient } = Headers -const Request = require('./request.js') -const { getNodeRequestOptions } = Request -const FetchError = require('./fetch-error.js') -const AbortError = require('./abort-error.js') - -// XXX this should really be split up and unit-ized for easier testing -// and better DRY implementation of data/http request aborting -const fetch = async (url, opts) => { - if (/^data:/.test(url)) { - const request = new Request(url, opts) - // delay 1 promise tick so that the consumer can abort right away - return Promise.resolve().then(() => new Promise((resolve, reject) => { - let type, data - try { - const { pathname, search } = new URL(url) - const split = pathname.split(',') - if (split.length < 2) { - throw new Error('invalid data: URI') - } - const mime = split.shift() - const base64 = /;base64$/.test(mime) - type = base64 ? mime.slice(0, -1 * ';base64'.length) : mime - const rawData = decodeURIComponent(split.join(',') + search) - data = base64 ? Buffer.from(rawData, 'base64') : Buffer.from(rawData) - } catch (er) { - return reject(new FetchError(`[${request.method}] ${ - request.url} invalid URL, ${er.message}`, 'system', er)) - } - - const { signal } = request - if (signal && signal.aborted) { - return reject(new AbortError('The user aborted a request.')) - } - - const headers = { 'Content-Length': data.length } - if (type) { - headers['Content-Type'] = type - } - return resolve(new Response(data, { headers })) - })) - } - - return new Promise((resolve, reject) => { - // build request object - const request = new Request(url, opts) - let options - try { - options = getNodeRequestOptions(request) - } catch (er) { - return reject(er) - } - - const send = (options.protocol === 'https:' ? 
https : http).request - const { signal } = request - let response = null - const abort = () => { - const error = new AbortError('The user aborted a request.') - reject(error) - if (Minipass.isStream(request.body) && - typeof request.body.destroy === 'function') { - request.body.destroy(error) - } - if (response && response.body) { - response.body.emit('error', error) - } - } - - if (signal && signal.aborted) { - return abort() - } - - const abortAndFinalize = () => { - abort() - finalize() - } - - const finalize = () => { - req.abort() - if (signal) { - signal.removeEventListener('abort', abortAndFinalize) - } - clearTimeout(reqTimeout) - } - - // send request - const req = send(options) - - if (signal) { - signal.addEventListener('abort', abortAndFinalize) - } - - let reqTimeout = null - if (request.timeout) { - req.once('socket', () => { - reqTimeout = setTimeout(() => { - reject(new FetchError(`network timeout at: ${ - request.url}`, 'request-timeout')) - finalize() - }, request.timeout) - }) - } - - req.on('error', er => { - // if a 'response' event is emitted before the 'error' event, then by the - // time this handler is run it's too late to reject the Promise for the - // response. instead, we forward the error event to the response stream - // so that the error will surface to the user when they try to consume - // the body. this is done as a side effect of aborting the request except - // for in windows, where we must forward the event manually, otherwise - // there is no longer a ref'd socket attached to the request and the - // stream never ends so the event loop runs out of work and the process - // exits without warning. - // coverage skipped here due to the difficulty in testing - // istanbul ignore next - if (req.res) { - req.res.emit('error', er) - } - reject(new FetchError(`request to ${request.url} failed, reason: ${ - er.message}`, 'system', er)) - finalize() - }) - - req.on('response', res => { - clearTimeout(reqTimeout) - - const headers = createHeadersLenient(res.headers) - - // HTTP fetch step 5 - if (fetch.isRedirect(res.statusCode)) { - // HTTP fetch step 5.2 - const location = headers.get('Location') - - // HTTP fetch step 5.3 - let locationURL = null - try { - locationURL = location === null ? null : new URL(location, request.url).toString() - } catch { - // error here can only be invalid URL in Location: header - // do not throw when options.redirect == manual - // let the user extract the errorneous redirect URL - if (request.redirect !== 'manual') { - /* eslint-disable-next-line max-len */ - reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')) - finalize() - return - } - } - - // HTTP fetch step 5.5 - if (request.redirect === 'error') { - reject(new FetchError('uri requested responds with a redirect, ' + - `redirect mode is set to error: ${request.url}`, 'no-redirect')) - finalize() - return - } else if (request.redirect === 'manual') { - // node-fetch-specific step: make manual redirect a bit easier to - // use by setting the Location header value to the resolved URL. 
- if (locationURL !== null) { - // handle corrupted header - try { - headers.set('Location', locationURL) - } catch (err) { - /* istanbul ignore next: nodejs server prevent invalid - response headers, we can't test this through normal - request */ - reject(err) - } - } - } else if (request.redirect === 'follow' && locationURL !== null) { - // HTTP-redirect fetch step 5 - if (request.counter >= request.follow) { - reject(new FetchError(`maximum redirect reached at: ${ - request.url}`, 'max-redirect')) - finalize() - return - } - - // HTTP-redirect fetch step 9 - if (res.statusCode !== 303 && - request.body && - getTotalBytes(request) === null) { - reject(new FetchError( - 'Cannot follow redirect with body being a readable stream', - 'unsupported-redirect' - )) - finalize() - return - } - - // Update host due to redirection - request.headers.set('host', (new URL(locationURL)).host) - - // HTTP-redirect fetch step 6 (counter increment) - // Create a new Request object. - const requestOpts = { - headers: new Headers(request.headers), - follow: request.follow, - counter: request.counter + 1, - agent: request.agent, - compress: request.compress, - method: request.method, - body: request.body, - signal: request.signal, - timeout: request.timeout, - } - - // if the redirect is to a new hostname, strip the authorization and cookie headers - const parsedOriginal = new URL(request.url) - const parsedRedirect = new URL(locationURL) - if (parsedOriginal.hostname !== parsedRedirect.hostname) { - requestOpts.headers.delete('authorization') - requestOpts.headers.delete('cookie') - } - - // HTTP-redirect fetch step 11 - if (res.statusCode === 303 || ( - (res.statusCode === 301 || res.statusCode === 302) && - request.method === 'POST' - )) { - requestOpts.method = 'GET' - requestOpts.body = undefined - requestOpts.headers.delete('content-length') - } - - // HTTP-redirect fetch step 15 - resolve(fetch(new Request(locationURL, requestOpts))) - finalize() - return - } - } // end if(isRedirect) - - // prepare response - res.once('end', () => - signal && signal.removeEventListener('abort', abortAndFinalize)) - - const body = new Minipass() - // if an error occurs, either on the response stream itself, on one of the - // decoder streams, or a response length timeout from the Body class, we - // forward the error through to our internal body stream. If we see an - // error event on that, we call finalize to abort the request and ensure - // we don't leave a socket believing a request is in flight. - // this is difficult to test, so lacks specific coverage. - body.on('error', finalize) - // exceedingly rare that the stream would have an error, - // but just in case we proxy it to the stream in use. - res.on('error', /* istanbul ignore next */ er => body.emit('error', er)) - res.on('data', (chunk) => body.write(chunk)) - res.on('end', () => body.end()) - - const responseOptions = { - url: request.url, - status: res.statusCode, - statusText: res.statusMessage, - headers: headers, - size: request.size, - timeout: request.timeout, - counter: request.counter, - trailer: new Promise(resolveTrailer => - res.on('end', () => resolveTrailer(createHeadersLenient(res.trailers)))), - } - - // HTTP-network fetch step 12.1.1.3 - const codings = headers.get('Content-Encoding') - - // HTTP-network fetch step 12.1.1.4: handle content codings - - // in following scenarios we ignore compression support - // 1. compression support is disabled - // 2. HEAD request - // 3. no Content-Encoding header - // 4. 
no content response (204) - // 5. content not modified response (304) - if (!request.compress || - request.method === 'HEAD' || - codings === null || - res.statusCode === 204 || - res.statusCode === 304) { - response = new Response(body, responseOptions) - resolve(response) - return - } - - // Be less strict when decoding compressed responses, since sometimes - // servers send slightly invalid responses that are still accepted - // by common browsers. - // Always using Z_SYNC_FLUSH is what cURL does. - const zlibOptions = { - flush: zlib.constants.Z_SYNC_FLUSH, - finishFlush: zlib.constants.Z_SYNC_FLUSH, - } - - // for gzip - if (codings === 'gzip' || codings === 'x-gzip') { - const unzip = new zlib.Gunzip(zlibOptions) - response = new Response( - // exceedingly rare that the stream would have an error, - // but just in case we proxy it to the stream in use. - body.on('error', /* istanbul ignore next */ er => unzip.emit('error', er)).pipe(unzip), - responseOptions - ) - resolve(response) - return - } - - // for deflate - if (codings === 'deflate' || codings === 'x-deflate') { - // handle the infamous raw deflate response from old servers - // a hack for old IIS and Apache servers - const raw = res.pipe(new Minipass()) - raw.once('data', chunk => { - // see http://stackoverflow.com/questions/37519828 - const decoder = (chunk[0] & 0x0F) === 0x08 - ? new zlib.Inflate() - : new zlib.InflateRaw() - // exceedingly rare that the stream would have an error, - // but just in case we proxy it to the stream in use. - body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder) - response = new Response(decoder, responseOptions) - resolve(response) - }) - return - } - - // for br - if (codings === 'br') { - // ignoring coverage so tests don't have to fake support (or lack of) for brotli - // istanbul ignore next - try { - var decoder = new zlib.BrotliDecompress() - } catch (err) { - reject(err) - finalize() - return - } - // exceedingly rare that the stream would have an error, - // but just in case we proxy it to the stream in use. 
- body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder) - response = new Response(decoder, responseOptions) - resolve(response) - return - } - - // otherwise, use response as-is - response = new Response(body, responseOptions) - resolve(response) - }) - - writeToStream(req, request) - }) -} - -module.exports = fetch - -fetch.isRedirect = code => - code === 301 || - code === 302 || - code === 303 || - code === 307 || - code === 308 - -fetch.Headers = Headers -fetch.Request = Request -fetch.Response = Response -fetch.FetchError = FetchError -fetch.AbortError = AbortError diff --git a/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/request.js b/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/request.js deleted file mode 100644 index 054439e669910..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/request.js +++ /dev/null @@ -1,282 +0,0 @@ -'use strict' -const { URL } = require('url') -const { Minipass } = require('minipass') -const Headers = require('./headers.js') -const { exportNodeCompatibleHeaders } = Headers -const Body = require('./body.js') -const { clone, extractContentType, getTotalBytes } = Body - -const version = require('../package.json').version -const defaultUserAgent = - `minipass-fetch/${version} (+https://github.com/isaacs/minipass-fetch)` - -const INTERNALS = Symbol('Request internals') - -const isRequest = input => - typeof input === 'object' && typeof input[INTERNALS] === 'object' - -const isAbortSignal = signal => { - const proto = ( - signal - && typeof signal === 'object' - && Object.getPrototypeOf(signal) - ) - return !!(proto && proto.constructor.name === 'AbortSignal') -} - -class Request extends Body { - constructor (input, init = {}) { - const parsedURL = isRequest(input) ? new URL(input.url) - : input && input.href ? new URL(input.href) - : new URL(`${input}`) - - if (isRequest(input)) { - init = { ...input[INTERNALS], ...init } - } else if (!input || typeof input === 'string') { - input = {} - } - - const method = (init.method || input.method || 'GET').toUpperCase() - const isGETHEAD = method === 'GET' || method === 'HEAD' - - if ((init.body !== null && init.body !== undefined || - isRequest(input) && input.body !== null) && isGETHEAD) { - throw new TypeError('Request with GET/HEAD method cannot have body') - } - - const inputBody = init.body !== null && init.body !== undefined ? init.body - : isRequest(input) && input.body !== null ? clone(input) - : null - - super(inputBody, { - timeout: init.timeout || input.timeout || 0, - size: init.size || input.size || 0, - }) - - const headers = new Headers(init.headers || input.headers || {}) - - if (inputBody !== null && inputBody !== undefined && - !headers.has('Content-Type')) { - const contentType = extractContentType(inputBody) - if (contentType) { - headers.append('Content-Type', contentType) - } - } - - const signal = 'signal' in init ? 
init.signal - : null - - if (signal !== null && signal !== undefined && !isAbortSignal(signal)) { - throw new TypeError('Expected signal must be an instanceof AbortSignal') - } - - // TLS specific options that are handled by node - const { - ca, - cert, - ciphers, - clientCertEngine, - crl, - dhparam, - ecdhCurve, - family, - honorCipherOrder, - key, - passphrase, - pfx, - rejectUnauthorized = process.env.NODE_TLS_REJECT_UNAUTHORIZED !== '0', - secureOptions, - secureProtocol, - servername, - sessionIdContext, - } = init - - this[INTERNALS] = { - method, - redirect: init.redirect || input.redirect || 'follow', - headers, - parsedURL, - signal, - ca, - cert, - ciphers, - clientCertEngine, - crl, - dhparam, - ecdhCurve, - family, - honorCipherOrder, - key, - passphrase, - pfx, - rejectUnauthorized, - secureOptions, - secureProtocol, - servername, - sessionIdContext, - } - - // node-fetch-only options - this.follow = init.follow !== undefined ? init.follow - : input.follow !== undefined ? input.follow - : 20 - this.compress = init.compress !== undefined ? init.compress - : input.compress !== undefined ? input.compress - : true - this.counter = init.counter || input.counter || 0 - this.agent = init.agent || input.agent - } - - get method () { - return this[INTERNALS].method - } - - get url () { - return this[INTERNALS].parsedURL.toString() - } - - get headers () { - return this[INTERNALS].headers - } - - get redirect () { - return this[INTERNALS].redirect - } - - get signal () { - return this[INTERNALS].signal - } - - clone () { - return new Request(this) - } - - get [Symbol.toStringTag] () { - return 'Request' - } - - static getNodeRequestOptions (request) { - const parsedURL = request[INTERNALS].parsedURL - const headers = new Headers(request[INTERNALS].headers) - - // fetch step 1.3 - if (!headers.has('Accept')) { - headers.set('Accept', '*/*') - } - - // Basic fetch - if (!/^https?:$/.test(parsedURL.protocol)) { - throw new TypeError('Only HTTP(S) protocols are supported') - } - - if (request.signal && - Minipass.isStream(request.body) && - typeof request.body.destroy !== 'function') { - throw new Error( - 'Cancellation of streamed requests with AbortSignal is not supported') - } - - // HTTP-network-or-cache fetch steps 2.4-2.7 - const contentLengthValue = - (request.body === null || request.body === undefined) && - /^(POST|PUT)$/i.test(request.method) ? '0' - : request.body !== null && request.body !== undefined - ? getTotalBytes(request) - : null - - if (contentLengthValue) { - headers.set('Content-Length', contentLengthValue + '') - } - - // HTTP-network-or-cache fetch step 2.11 - if (!headers.has('User-Agent')) { - headers.set('User-Agent', defaultUserAgent) - } - - // HTTP-network-or-cache fetch step 2.15 - if (request.compress && !headers.has('Accept-Encoding')) { - headers.set('Accept-Encoding', 'gzip,deflate') - } - - const agent = typeof request.agent === 'function' - ? 
request.agent(parsedURL) - : request.agent - - if (!headers.has('Connection') && !agent) { - headers.set('Connection', 'close') - } - - // TLS specific options that are handled by node - const { - ca, - cert, - ciphers, - clientCertEngine, - crl, - dhparam, - ecdhCurve, - family, - honorCipherOrder, - key, - passphrase, - pfx, - rejectUnauthorized, - secureOptions, - secureProtocol, - servername, - sessionIdContext, - } = request[INTERNALS] - - // HTTP-network fetch step 4.2 - // chunked encoding is handled by Node.js - - // we cannot spread parsedURL directly, so we have to read each property one-by-one - // and map them to the equivalent https?.request() method options - const urlProps = { - auth: parsedURL.username || parsedURL.password - ? `${parsedURL.username}:${parsedURL.password}` - : '', - host: parsedURL.host, - hostname: parsedURL.hostname, - path: `${parsedURL.pathname}${parsedURL.search}`, - port: parsedURL.port, - protocol: parsedURL.protocol, - } - - return { - ...urlProps, - method: request.method, - headers: exportNodeCompatibleHeaders(headers), - agent, - ca, - cert, - ciphers, - clientCertEngine, - crl, - dhparam, - ecdhCurve, - family, - honorCipherOrder, - key, - passphrase, - pfx, - rejectUnauthorized, - secureOptions, - secureProtocol, - servername, - sessionIdContext, - timeout: request.timeout, - } - } -} - -module.exports = Request - -Object.defineProperties(Request.prototype, { - method: { enumerable: true }, - url: { enumerable: true }, - headers: { enumerable: true }, - redirect: { enumerable: true }, - clone: { enumerable: true }, - signal: { enumerable: true }, -}) diff --git a/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/response.js b/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/response.js deleted file mode 100644 index 54cb52db3594a..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/response.js +++ /dev/null @@ -1,90 +0,0 @@ -'use strict' -const http = require('http') -const { STATUS_CODES } = http - -const Headers = require('./headers.js') -const Body = require('./body.js') -const { clone, extractContentType } = Body - -const INTERNALS = Symbol('Response internals') - -class Response extends Body { - constructor (body = null, opts = {}) { - super(body, opts) - - const status = opts.status || 200 - const headers = new Headers(opts.headers) - - if (body !== null && body !== undefined && !headers.has('Content-Type')) { - const contentType = extractContentType(body) - if (contentType) { - headers.append('Content-Type', contentType) - } - } - - this[INTERNALS] = { - url: opts.url, - status, - statusText: opts.statusText || STATUS_CODES[status], - headers, - counter: opts.counter, - trailer: Promise.resolve(opts.trailer || new Headers()), - } - } - - get trailer () { - return this[INTERNALS].trailer - } - - get url () { - return this[INTERNALS].url || '' - } - - get status () { - return this[INTERNALS].status - } - - get ok () { - return this[INTERNALS].status >= 200 && this[INTERNALS].status < 300 - } - - get redirected () { - return this[INTERNALS].counter > 0 - } - - get statusText () { - return this[INTERNALS].statusText - } - - get headers () { - return this[INTERNALS].headers - } - - clone () { - return new Response(clone(this), { - url: this.url, - status: this.status, - statusText: this.statusText, - headers: this.headers, - ok: this.ok, - redirected: this.redirected, - trailer: this.trailer, - }) - } - - get [Symbol.toStringTag] () { - return 'Response' - } -} - -module.exports = Response - 
-Object.defineProperties(Response.prototype, { - url: { enumerable: true }, - status: { enumerable: true }, - ok: { enumerable: true }, - redirected: { enumerable: true }, - statusText: { enumerable: true }, - headers: { enumerable: true }, - clone: { enumerable: true }, -}) diff --git a/node_modules/@sigstore/sign/node_modules/minipass-fetch/package.json b/node_modules/@sigstore/sign/node_modules/minipass-fetch/package.json deleted file mode 100644 index d491a7fba126d..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/minipass-fetch/package.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - "name": "minipass-fetch", - "version": "3.0.5", - "description": "An implementation of window.fetch in Node.js using Minipass streams", - "license": "MIT", - "main": "lib/index.js", - "scripts": { - "test:tls-fixtures": "./test/fixtures/tls/setup.sh", - "test": "tap", - "snap": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", - "posttest": "npm run lint", - "template-oss-apply": "template-oss-apply --force" - }, - "tap": { - "coverage-map": "map.js", - "check-coverage": true, - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - }, - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", - "@ungap/url-search-params": "^0.2.2", - "abort-controller": "^3.0.0", - "abortcontroller-polyfill": "~1.7.3", - "encoding": "^0.1.13", - "form-data": "^4.0.0", - "nock": "^13.2.4", - "parted": "^0.1.1", - "string-to-arraybuffer": "^1.0.2", - "tap": "^16.0.0" - }, - "dependencies": { - "minipass": "^7.0.3", - "minipass-sized": "^1.0.3", - "minizlib": "^2.1.2" - }, - "optionalDependencies": { - "encoding": "^0.1.13" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/npm/minipass-fetch.git" - }, - "keywords": [ - "fetch", - "minipass", - "node-fetch", - "window.fetch" - ], - "files": [ - "bin/", - "lib/" - ], - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - }, - "author": "GitHub Inc.", - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.22.0", - "publish": "true" - } -} diff --git a/node_modules/@sigstore/sign/node_modules/proc-log/LICENSE b/node_modules/@sigstore/sign/node_modules/proc-log/LICENSE deleted file mode 100644 index 83837797202b7..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/proc-log/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) GitHub, Inc. - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/@sigstore/sign/node_modules/proc-log/lib/index.js b/node_modules/@sigstore/sign/node_modules/proc-log/lib/index.js deleted file mode 100644 index 86d90861078da..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/proc-log/lib/index.js +++ /dev/null @@ -1,153 +0,0 @@ -const META = Symbol('proc-log.meta') -module.exports = { - META: META, - output: { - LEVELS: [ - 'standard', - 'error', - 'buffer', - 'flush', - ], - KEYS: { - standard: 'standard', - error: 'error', - buffer: 'buffer', - flush: 'flush', - }, - standard: function (...args) { - return process.emit('output', 'standard', ...args) - }, - error: function (...args) { - return process.emit('output', 'error', ...args) - }, - buffer: function (...args) { - return process.emit('output', 'buffer', ...args) - }, - flush: function (...args) { - return process.emit('output', 'flush', ...args) - }, - }, - log: { - LEVELS: [ - 'notice', - 'error', - 'warn', - 'info', - 'verbose', - 'http', - 'silly', - 'timing', - 'pause', - 'resume', - ], - KEYS: { - notice: 'notice', - error: 'error', - warn: 'warn', - info: 'info', - verbose: 'verbose', - http: 'http', - silly: 'silly', - timing: 'timing', - pause: 'pause', - resume: 'resume', - }, - error: function (...args) { - return process.emit('log', 'error', ...args) - }, - notice: function (...args) { - return process.emit('log', 'notice', ...args) - }, - warn: function (...args) { - return process.emit('log', 'warn', ...args) - }, - info: function (...args) { - return process.emit('log', 'info', ...args) - }, - verbose: function (...args) { - return process.emit('log', 'verbose', ...args) - }, - http: function (...args) { - return process.emit('log', 'http', ...args) - }, - silly: function (...args) { - return process.emit('log', 'silly', ...args) - }, - timing: function (...args) { - return process.emit('log', 'timing', ...args) - }, - pause: function () { - return process.emit('log', 'pause') - }, - resume: function () { - return process.emit('log', 'resume') - }, - }, - time: { - LEVELS: [ - 'start', - 'end', - ], - KEYS: { - start: 'start', - end: 'end', - }, - start: function (name, fn) { - process.emit('time', 'start', name) - function end () { - return process.emit('time', 'end', name) - } - if (typeof fn === 'function') { - const res = fn() - if (res && res.finally) { - return res.finally(end) - } - end() - return res - } - return end - }, - end: function (name) { - return process.emit('time', 'end', name) - }, - }, - input: { - LEVELS: [ - 'start', - 'end', - 'read', - ], - KEYS: { - start: 'start', - end: 'end', - read: 'read', - }, - start: function (fn) { - process.emit('input', 'start') - function end () { - return process.emit('input', 'end') - } - if (typeof fn === 'function') { - const res = fn() - if (res && res.finally) { - return res.finally(end) - } - end() - return res - } - return end - }, - end: function () { - return process.emit('input', 'end') - }, - read: function (...args) { - let resolve, reject - const promise = new Promise((_resolve, _reject) => { - resolve = _resolve - reject = _reject - }) - process.emit('input', 'read', resolve, reject, ...args) - return promise - }, - }, -} diff --git a/node_modules/@sigstore/sign/node_modules/proc-log/package.json b/node_modules/@sigstore/sign/node_modules/proc-log/package.json deleted file mode 100644 index 4ab89102ecc9b..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/proc-log/package.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "name": "proc-log", - "version": "4.2.0", - "files": [ 
- "bin/", - "lib/" - ], - "main": "lib/index.js", - "description": "just emit 'log' events on the process object", - "repository": { - "type": "git", - "url": "https://github.com/npm/proc-log.git" - }, - "author": "GitHub Inc.", - "license": "ISC", - "scripts": { - "test": "tap", - "snap": "tap", - "posttest": "npm run lint", - "postsnap": "eslint index.js test/*.js --fix", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", - "template-oss-apply": "template-oss-apply --force" - }, - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.21.3", - "tap": "^16.0.1" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.21.3", - "publish": true - }, - "tap": { - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - } -} diff --git a/node_modules/@sigstore/sign/node_modules/ssri/LICENSE.md b/node_modules/@sigstore/sign/node_modules/ssri/LICENSE.md deleted file mode 100644 index e335388869f50..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/ssri/LICENSE.md +++ /dev/null @@ -1,16 +0,0 @@ -ISC License - -Copyright 2021 (c) npm, Inc. - -Permission to use, copy, modify, and/or distribute this software for -any purpose with or without fee is hereby granted, provided that the -above copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS -ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE -COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE -USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/@sigstore/sign/node_modules/ssri/lib/index.js b/node_modules/@sigstore/sign/node_modules/ssri/lib/index.js deleted file mode 100644 index 7d749ed480fb9..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/ssri/lib/index.js +++ /dev/null @@ -1,580 +0,0 @@ -'use strict' - -const crypto = require('crypto') -const { Minipass } = require('minipass') - -const SPEC_ALGORITHMS = ['sha512', 'sha384', 'sha256'] -const DEFAULT_ALGORITHMS = ['sha512'] - -// TODO: this should really be a hardcoded list of algorithms we support, -// rather than [a-z0-9]. -const BASE64_REGEX = /^[a-z0-9+/]+(?:=?=?)$/i -const SRI_REGEX = /^([a-z0-9]+)-([^?]+)([?\S*]*)$/ -const STRICT_SRI_REGEX = /^([a-z0-9]+)-([A-Za-z0-9+/=]{44,88})(\?[\x21-\x7E]*)?$/ -const VCHAR_REGEX = /^[\x21-\x7E]+$/ - -const getOptString = options => options?.length ? `?${options.join('?')}` : '' - -class IntegrityStream extends Minipass { - #emittedIntegrity - #emittedSize - #emittedVerified - - constructor (opts) { - super() - this.size = 0 - this.opts = opts - - // may be overridden later, but set now for class consistency - this.#getOptions() - - // options used for calculating stream. can't be changed. 
- if (opts?.algorithms) { - this.algorithms = [...opts.algorithms] - } else { - this.algorithms = [...DEFAULT_ALGORITHMS] - } - if (this.algorithm !== null && !this.algorithms.includes(this.algorithm)) { - this.algorithms.push(this.algorithm) - } - - this.hashes = this.algorithms.map(crypto.createHash) - } - - #getOptions () { - // For verification - this.sri = this.opts?.integrity ? parse(this.opts?.integrity, this.opts) : null - this.expectedSize = this.opts?.size - - if (!this.sri) { - this.algorithm = null - } else if (this.sri.isHash) { - this.goodSri = true - this.algorithm = this.sri.algorithm - } else { - this.goodSri = !this.sri.isEmpty() - this.algorithm = this.sri.pickAlgorithm(this.opts) - } - - this.digests = this.goodSri ? this.sri[this.algorithm] : null - this.optString = getOptString(this.opts?.options) - } - - on (ev, handler) { - if (ev === 'size' && this.#emittedSize) { - return handler(this.#emittedSize) - } - - if (ev === 'integrity' && this.#emittedIntegrity) { - return handler(this.#emittedIntegrity) - } - - if (ev === 'verified' && this.#emittedVerified) { - return handler(this.#emittedVerified) - } - - return super.on(ev, handler) - } - - emit (ev, data) { - if (ev === 'end') { - this.#onEnd() - } - return super.emit(ev, data) - } - - write (data) { - this.size += data.length - this.hashes.forEach(h => h.update(data)) - return super.write(data) - } - - #onEnd () { - if (!this.goodSri) { - this.#getOptions() - } - const newSri = parse(this.hashes.map((h, i) => { - return `${this.algorithms[i]}-${h.digest('base64')}${this.optString}` - }).join(' '), this.opts) - // Integrity verification mode - const match = this.goodSri && newSri.match(this.sri, this.opts) - if (typeof this.expectedSize === 'number' && this.size !== this.expectedSize) { - /* eslint-disable-next-line max-len */ - const err = new Error(`stream size mismatch when checking ${this.sri}.\n Wanted: ${this.expectedSize}\n Found: ${this.size}`) - err.code = 'EBADSIZE' - err.found = this.size - err.expected = this.expectedSize - err.sri = this.sri - this.emit('error', err) - } else if (this.sri && !match) { - /* eslint-disable-next-line max-len */ - const err = new Error(`${this.sri} integrity checksum failed when using ${this.algorithm}: wanted ${this.digests} but got ${newSri}. (${this.size} bytes)`) - err.code = 'EINTEGRITY' - err.found = newSri - err.expected = this.digests - err.algorithm = this.algorithm - err.sri = this.sri - this.emit('error', err) - } else { - this.#emittedSize = this.size - this.emit('size', this.size) - this.#emittedIntegrity = newSri - this.emit('integrity', newSri) - if (match) { - this.#emittedVerified = match - this.emit('verified', match) - } - } - } -} - -class Hash { - get isHash () { - return true - } - - constructor (hash, opts) { - const strict = opts?.strict - this.source = hash.trim() - - // set default values so that we make V8 happy to - // always see a familiar object template. - this.digest = '' - this.algorithm = '' - this.options = [] - - // 3.1. Integrity metadata (called "Hash" by ssri) - // https://w3c.github.io/webappsec-subresource-integrity/#integrity-metadata-description - const match = this.source.match( - strict - ? 
STRICT_SRI_REGEX - : SRI_REGEX - ) - if (!match) { - return - } - if (strict && !SPEC_ALGORITHMS.includes(match[1])) { - return - } - this.algorithm = match[1] - this.digest = match[2] - - const rawOpts = match[3] - if (rawOpts) { - this.options = rawOpts.slice(1).split('?') - } - } - - hexDigest () { - return this.digest && Buffer.from(this.digest, 'base64').toString('hex') - } - - toJSON () { - return this.toString() - } - - match (integrity, opts) { - const other = parse(integrity, opts) - if (!other) { - return false - } - if (other.isIntegrity) { - const algo = other.pickAlgorithm(opts, [this.algorithm]) - - if (!algo) { - return false - } - - const foundHash = other[algo].find(hash => hash.digest === this.digest) - - if (foundHash) { - return foundHash - } - - return false - } - return other.digest === this.digest ? other : false - } - - toString (opts) { - if (opts?.strict) { - // Strict mode enforces the standard as close to the foot of the - // letter as it can. - if (!( - // The spec has very restricted productions for algorithms. - // https://www.w3.org/TR/CSP2/#source-list-syntax - SPEC_ALGORITHMS.includes(this.algorithm) && - // Usually, if someone insists on using a "different" base64, we - // leave it as-is, since there's multiple standards, and the - // specified is not a URL-safe variant. - // https://www.w3.org/TR/CSP2/#base64_value - this.digest.match(BASE64_REGEX) && - // Option syntax is strictly visual chars. - // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-option-expression - // https://tools.ietf.org/html/rfc5234#appendix-B.1 - this.options.every(opt => opt.match(VCHAR_REGEX)) - )) { - return '' - } - } - return `${this.algorithm}-${this.digest}${getOptString(this.options)}` - } -} - -function integrityHashToString (toString, sep, opts, hashes) { - const toStringIsNotEmpty = toString !== '' - - let shouldAddFirstSep = false - let complement = '' - - const lastIndex = hashes.length - 1 - - for (let i = 0; i < lastIndex; i++) { - const hashString = Hash.prototype.toString.call(hashes[i], opts) - - if (hashString) { - shouldAddFirstSep = true - - complement += hashString - complement += sep - } - } - - const finalHashString = Hash.prototype.toString.call(hashes[lastIndex], opts) - - if (finalHashString) { - shouldAddFirstSep = true - complement += finalHashString - } - - if (toStringIsNotEmpty && shouldAddFirstSep) { - return toString + sep + complement - } - - return toString + complement -} - -class Integrity { - get isIntegrity () { - return true - } - - toJSON () { - return this.toString() - } - - isEmpty () { - return Object.keys(this).length === 0 - } - - toString (opts) { - let sep = opts?.sep || ' ' - let toString = '' - - if (opts?.strict) { - // Entries must be separated by whitespace, according to spec. - sep = sep.replace(/\S+/g, ' ') - - for (const hash of SPEC_ALGORITHMS) { - if (this[hash]) { - toString = integrityHashToString(toString, sep, opts, this[hash]) - } - } - } else { - for (const hash of Object.keys(this)) { - toString = integrityHashToString(toString, sep, opts, this[hash]) - } - } - - return toString - } - - concat (integrity, opts) { - const other = typeof integrity === 'string' - ? integrity - : stringify(integrity, opts) - return parse(`${this.toString(opts)} ${other}`, opts) - } - - hexDigest () { - return parse(this, { single: true }).hexDigest() - } - - // add additional hashes to an integrity value, but prevent - // *changing* an existing integrity hash. 
- merge (integrity, opts) { - const other = parse(integrity, opts) - for (const algo in other) { - if (this[algo]) { - if (!this[algo].find(hash => - other[algo].find(otherhash => - hash.digest === otherhash.digest))) { - throw new Error('hashes do not match, cannot update integrity') - } - } else { - this[algo] = other[algo] - } - } - } - - match (integrity, opts) { - const other = parse(integrity, opts) - if (!other) { - return false - } - const algo = other.pickAlgorithm(opts, Object.keys(this)) - return ( - !!algo && - this[algo] && - other[algo] && - this[algo].find(hash => - other[algo].find(otherhash => - hash.digest === otherhash.digest - ) - ) - ) || false - } - - // Pick the highest priority algorithm present, optionally also limited to a - // set of hashes found in another integrity. When limiting it may return - // nothing. - pickAlgorithm (opts, hashes) { - const pickAlgorithm = opts?.pickAlgorithm || getPrioritizedHash - const keys = Object.keys(this).filter(k => { - if (hashes?.length) { - return hashes.includes(k) - } - return true - }) - if (keys.length) { - return keys.reduce((acc, algo) => pickAlgorithm(acc, algo) || acc) - } - // no intersection between this and hashes, - return null - } -} - -module.exports.parse = parse -function parse (sri, opts) { - if (!sri) { - return null - } - if (typeof sri === 'string') { - return _parse(sri, opts) - } else if (sri.algorithm && sri.digest) { - const fullSri = new Integrity() - fullSri[sri.algorithm] = [sri] - return _parse(stringify(fullSri, opts), opts) - } else { - return _parse(stringify(sri, opts), opts) - } -} - -function _parse (integrity, opts) { - // 3.4.3. Parse metadata - // https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata - if (opts?.single) { - return new Hash(integrity, opts) - } - const hashes = integrity.trim().split(/\s+/).reduce((acc, string) => { - const hash = new Hash(string, opts) - if (hash.algorithm && hash.digest) { - const algo = hash.algorithm - if (!acc[algo]) { - acc[algo] = [] - } - acc[algo].push(hash) - } - return acc - }, new Integrity()) - return hashes.isEmpty() ? null : hashes -} - -module.exports.stringify = stringify -function stringify (obj, opts) { - if (obj.algorithm && obj.digest) { - return Hash.prototype.toString.call(obj, opts) - } else if (typeof obj === 'string') { - return stringify(parse(obj, opts), opts) - } else { - return Integrity.prototype.toString.call(obj, opts) - } -} - -module.exports.fromHex = fromHex -function fromHex (hexDigest, algorithm, opts) { - const optString = getOptString(opts?.options) - return parse( - `${algorithm}-${ - Buffer.from(hexDigest, 'hex').toString('base64') - }${optString}`, opts - ) -} - -module.exports.fromData = fromData -function fromData (data, opts) { - const algorithms = opts?.algorithms || [...DEFAULT_ALGORITHMS] - const optString = getOptString(opts?.options) - return algorithms.reduce((acc, algo) => { - const digest = crypto.createHash(algo).update(data).digest('base64') - const hash = new Hash( - `${algo}-${digest}${optString}`, - opts - ) - /* istanbul ignore else - it would be VERY strange if the string we - * just calculated with an algo did not have an algo or digest. 
- */ - if (hash.algorithm && hash.digest) { - const hashAlgo = hash.algorithm - if (!acc[hashAlgo]) { - acc[hashAlgo] = [] - } - acc[hashAlgo].push(hash) - } - return acc - }, new Integrity()) -} - -module.exports.fromStream = fromStream -function fromStream (stream, opts) { - const istream = integrityStream(opts) - return new Promise((resolve, reject) => { - stream.pipe(istream) - stream.on('error', reject) - istream.on('error', reject) - let sri - istream.on('integrity', s => { - sri = s - }) - istream.on('end', () => resolve(sri)) - istream.resume() - }) -} - -module.exports.checkData = checkData -function checkData (data, sri, opts) { - sri = parse(sri, opts) - if (!sri || !Object.keys(sri).length) { - if (opts?.error) { - throw Object.assign( - new Error('No valid integrity hashes to check against'), { - code: 'EINTEGRITY', - } - ) - } else { - return false - } - } - const algorithm = sri.pickAlgorithm(opts) - const digest = crypto.createHash(algorithm).update(data).digest('base64') - const newSri = parse({ algorithm, digest }) - const match = newSri.match(sri, opts) - opts = opts || {} - if (match || !(opts.error)) { - return match - } else if (typeof opts.size === 'number' && (data.length !== opts.size)) { - /* eslint-disable-next-line max-len */ - const err = new Error(`data size mismatch when checking ${sri}.\n Wanted: ${opts.size}\n Found: ${data.length}`) - err.code = 'EBADSIZE' - err.found = data.length - err.expected = opts.size - err.sri = sri - throw err - } else { - /* eslint-disable-next-line max-len */ - const err = new Error(`Integrity checksum failed when using ${algorithm}: Wanted ${sri}, but got ${newSri}. (${data.length} bytes)`) - err.code = 'EINTEGRITY' - err.found = newSri - err.expected = sri - err.algorithm = algorithm - err.sri = sri - throw err - } -} - -module.exports.checkStream = checkStream -function checkStream (stream, sri, opts) { - opts = opts || Object.create(null) - opts.integrity = sri - sri = parse(sri, opts) - if (!sri || !Object.keys(sri).length) { - return Promise.reject(Object.assign( - new Error('No valid integrity hashes to check against'), { - code: 'EINTEGRITY', - } - )) - } - const checker = integrityStream(opts) - return new Promise((resolve, reject) => { - stream.pipe(checker) - stream.on('error', reject) - checker.on('error', reject) - let verified - checker.on('verified', s => { - verified = s - }) - checker.on('end', () => resolve(verified)) - checker.resume() - }) -} - -module.exports.integrityStream = integrityStream -function integrityStream (opts = Object.create(null)) { - return new IntegrityStream(opts) -} - -module.exports.create = createIntegrity -function createIntegrity (opts) { - const algorithms = opts?.algorithms || [...DEFAULT_ALGORITHMS] - const optString = getOptString(opts?.options) - - const hashes = algorithms.map(crypto.createHash) - - return { - update: function (chunk, enc) { - hashes.forEach(h => h.update(chunk, enc)) - return this - }, - digest: function () { - const integrity = algorithms.reduce((acc, algo) => { - const digest = hashes.shift().digest('base64') - const hash = new Hash( - `${algo}-${digest}${optString}`, - opts - ) - /* istanbul ignore else - it would be VERY strange if the hash we - * just calculated with an algo did not have an algo or digest. 
- */ - if (hash.algorithm && hash.digest) { - const hashAlgo = hash.algorithm - if (!acc[hashAlgo]) { - acc[hashAlgo] = [] - } - acc[hashAlgo].push(hash) - } - return acc - }, new Integrity()) - - return integrity - }, - } -} - -const NODE_HASHES = crypto.getHashes() - -// This is a Best Effort™ at a reasonable priority for hash algos -const DEFAULT_PRIORITY = [ - 'md5', 'whirlpool', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512', - // TODO - it's unclear _which_ of these Node will actually use as its name - // for the algorithm, so we guesswork it based on the OpenSSL names. - 'sha3', - 'sha3-256', 'sha3-384', 'sha3-512', - 'sha3_256', 'sha3_384', 'sha3_512', -].filter(algo => NODE_HASHES.includes(algo)) - -function getPrioritizedHash (algo1, algo2) { - /* eslint-disable-next-line max-len */ - return DEFAULT_PRIORITY.indexOf(algo1.toLowerCase()) >= DEFAULT_PRIORITY.indexOf(algo2.toLowerCase()) - ? algo1 - : algo2 -} diff --git a/node_modules/@sigstore/sign/node_modules/ssri/package.json b/node_modules/@sigstore/sign/node_modules/ssri/package.json deleted file mode 100644 index 28395414e4643..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/ssri/package.json +++ /dev/null @@ -1,65 +0,0 @@ -{ - "name": "ssri", - "version": "10.0.6", - "description": "Standard Subresource Integrity library -- parses, serializes, generates, and verifies integrity metadata according to the SRI spec.", - "main": "lib/index.js", - "files": [ - "bin/", - "lib/" - ], - "scripts": { - "prerelease": "npm t", - "postrelease": "npm publish", - "posttest": "npm run lint", - "test": "tap", - "coverage": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "snap": "tap" - }, - "tap": { - "check-coverage": true, - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - }, - "repository": { - "type": "git", - "url": "git+https://github.com/npm/ssri.git" - }, - "keywords": [ - "w3c", - "web", - "security", - "integrity", - "checksum", - "hashing", - "subresource integrity", - "sri", - "sri hash", - "sri string", - "sri generator", - "html" - ], - "author": "GitHub Inc.", - "license": "ISC", - "dependencies": { - "minipass": "^7.0.3" - }, - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", - "tap": "^16.0.1" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.22.0", - "publish": "true" - } -} diff --git a/node_modules/@sigstore/sign/node_modules/unique-filename/LICENSE b/node_modules/@sigstore/sign/node_modules/unique-filename/LICENSE deleted file mode 100644 index 69619c125ea7e..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/unique-filename/LICENSE +++ /dev/null @@ -1,5 +0,0 @@ -Copyright npm, Inc - -Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/@sigstore/sign/node_modules/unique-filename/lib/index.js b/node_modules/@sigstore/sign/node_modules/unique-filename/lib/index.js deleted file mode 100644 index d067d2e709809..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/unique-filename/lib/index.js +++ /dev/null @@ -1,7 +0,0 @@ -var path = require('path') - -var uniqueSlug = require('unique-slug') - -module.exports = function (filepath, prefix, uniq) { - return path.join(filepath, (prefix ? prefix + '-' : '') + uniqueSlug(uniq)) -} diff --git a/node_modules/@sigstore/sign/node_modules/unique-filename/package.json b/node_modules/@sigstore/sign/node_modules/unique-filename/package.json deleted file mode 100644 index b2fbf0666489a..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/unique-filename/package.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "name": "unique-filename", - "version": "3.0.0", - "description": "Generate a unique filename for use in temporary directories or caches.", - "main": "lib/index.js", - "scripts": { - "test": "tap", - "lint": "eslint \"**/*.js\"", - "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "snap": "tap", - "posttest": "npm run lint" - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/unique-filename.git" - }, - "keywords": [], - "author": "GitHub Inc.", - "license": "ISC", - "bugs": { - "url": "https://github.com/iarna/unique-filename/issues" - }, - "homepage": "https://github.com/iarna/unique-filename", - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.5.1", - "tap": "^16.3.0" - }, - "dependencies": { - "unique-slug": "^4.0.0" - }, - "files": [ - "bin/", - "lib/" - ], - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.5.1" - }, - "tap": { - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - } -} diff --git a/node_modules/@sigstore/sign/node_modules/unique-slug/LICENSE b/node_modules/@sigstore/sign/node_modules/unique-slug/LICENSE deleted file mode 100644 index 7953647e7760b..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/unique-slug/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright npm, Inc - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/@sigstore/sign/node_modules/unique-slug/lib/index.js b/node_modules/@sigstore/sign/node_modules/unique-slug/lib/index.js deleted file mode 100644 index 1bac84d95d730..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/unique-slug/lib/index.js +++ /dev/null @@ -1,11 +0,0 @@ -'use strict' -var MurmurHash3 = require('imurmurhash') - -module.exports = function (uniq) { - if (uniq) { - var hash = new MurmurHash3(uniq) - return ('00000000' + hash.result().toString(16)).slice(-8) - } else { - return (Math.random().toString(16) + '0000000').slice(2, 10) - } -} diff --git a/node_modules/@sigstore/sign/node_modules/unique-slug/package.json b/node_modules/@sigstore/sign/node_modules/unique-slug/package.json deleted file mode 100644 index 33732cdbb4285..0000000000000 --- a/node_modules/@sigstore/sign/node_modules/unique-slug/package.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "name": "unique-slug", - "version": "4.0.0", - "description": "Generate a unique character string suitible for use in files and URLs.", - "main": "lib/index.js", - "scripts": { - "test": "tap", - "lint": "eslint \"**/*.js\"", - "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "snap": "tap", - "posttest": "npm run lint" - }, - "keywords": [], - "author": "GitHub Inc.", - "license": "ISC", - "devDependencies": { - "@npmcli/eslint-config": "^3.1.0", - "@npmcli/template-oss": "4.5.1", - "tap": "^16.3.0" - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/unique-slug.git" - }, - "dependencies": { - "imurmurhash": "^0.1.4" - }, - "files": [ - "bin/", - "lib/" - ], - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.5.1" - }, - "tap": { - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - } -} diff --git a/node_modules/@sigstore/sign/package.json b/node_modules/@sigstore/sign/package.json deleted file mode 100644 index 4adb3d24c6fa6..0000000000000 --- a/node_modules/@sigstore/sign/package.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "name": "@sigstore/sign", - "version": "2.3.2", - "description": "Sigstore signing library", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "scripts": { - "clean": "shx rm -rf dist *.tsbuildinfo", - "build": "tsc --build", - "test": "jest" - }, - "files": [ - "dist" - ], - "author": "bdehamer@github.com", - "license": "Apache-2.0", - "repository": { - "type": "git", - "url": "git+https://github.com/sigstore/sigstore-js.git" - }, - "bugs": { - "url": "https://github.com/sigstore/sigstore-js/issues" - }, - "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/sign#readme", - "publishConfig": { - "provenance": true - }, - "devDependencies": { - "@sigstore/jest": "^0.0.0", - "@sigstore/mock": "^0.7.4", - "@sigstore/rekor-types": "^2.0.0", - "@types/make-fetch-happen": "^10.0.4", - "@types/promise-retry": "^1.1.6" - }, - "dependencies": { - "@sigstore/bundle": "^2.3.2", - "@sigstore/core": "^1.0.0", - "@sigstore/protobuf-specs": "^0.3.2", - "make-fetch-happen": "^13.0.1", - "proc-log": "^4.2.0", - "promise-retry": "^2.0.1" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } -} diff --git a/node_modules/@sigstore/verify/dist/bundle/dsse.js b/node_modules/@sigstore/verify/dist/bundle/dsse.js deleted file mode 100644 index 193f875fd1014..0000000000000 --- a/node_modules/@sigstore/verify/dist/bundle/dsse.js +++ /dev/null @@ -1,43 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.DSSESignatureContent = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const core_1 = require("@sigstore/core"); -class DSSESignatureContent { - constructor(env) { - this.env = env; - } - compareDigest(digest) { - return core_1.crypto.bufferEqual(digest, core_1.crypto.hash(this.env.payload)); - } - compareSignature(signature) { - return core_1.crypto.bufferEqual(signature, this.signature); - } - verifySignature(key) { - return core_1.crypto.verify(this.preAuthEncoding, key, this.signature); - } - get signature() { - return this.env.signatures.length > 0 - ? 
this.env.signatures[0].sig - : Buffer.from(''); - } - // DSSE Pre-Authentication Encoding - get preAuthEncoding() { - return core_1.dsse.preAuthEncoding(this.env.payloadType, this.env.payload); - } -} -exports.DSSESignatureContent = DSSESignatureContent; diff --git a/node_modules/@sigstore/verify/dist/bundle/index.js b/node_modules/@sigstore/verify/dist/bundle/index.js deleted file mode 100644 index 63f8d4c499881..0000000000000 --- a/node_modules/@sigstore/verify/dist/bundle/index.js +++ /dev/null @@ -1,58 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.signatureContent = exports.toSignedEntity = void 0; -const core_1 = require("@sigstore/core"); -const dsse_1 = require("./dsse"); -const message_1 = require("./message"); -function toSignedEntity(bundle, artifact) { - const { tlogEntries, timestampVerificationData } = bundle.verificationMaterial; - const timestamps = []; - for (const entry of tlogEntries) { - timestamps.push({ - $case: 'transparency-log', - tlogEntry: entry, - }); - } - for (const ts of timestampVerificationData?.rfc3161Timestamps ?? []) { - timestamps.push({ - $case: 'timestamp-authority', - timestamp: core_1.RFC3161Timestamp.parse(ts.signedTimestamp), - }); - } - return { - signature: signatureContent(bundle, artifact), - key: key(bundle), - tlogEntries, - timestamps, - }; -} -exports.toSignedEntity = toSignedEntity; -function signatureContent(bundle, artifact) { - switch (bundle.content.$case) { - case 'dsseEnvelope': - return new dsse_1.DSSESignatureContent(bundle.content.dsseEnvelope); - case 'messageSignature': - return new message_1.MessageSignatureContent(bundle.content.messageSignature, artifact); - } -} -exports.signatureContent = signatureContent; -function key(bundle) { - switch (bundle.verificationMaterial.content.$case) { - case 'publicKey': - return { - $case: 'public-key', - hint: bundle.verificationMaterial.content.publicKey.hint, - }; - case 'x509CertificateChain': - return { - $case: 'certificate', - certificate: core_1.X509Certificate.parse(bundle.verificationMaterial.content.x509CertificateChain - .certificates[0].rawBytes), - }; - case 'certificate': - return { - $case: 'certificate', - certificate: core_1.X509Certificate.parse(bundle.verificationMaterial.content.certificate.rawBytes), - }; - } -} diff --git a/node_modules/@sigstore/verify/dist/bundle/message.js b/node_modules/@sigstore/verify/dist/bundle/message.js deleted file mode 100644 index 836148c68a8b6..0000000000000 --- a/node_modules/@sigstore/verify/dist/bundle/message.js +++ /dev/null @@ -1,36 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.MessageSignatureContent = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -const core_1 = require("@sigstore/core"); -class MessageSignatureContent { - constructor(messageSignature, artifact) { - this.signature = messageSignature.signature; - this.messageDigest = messageSignature.messageDigest.digest; - this.artifact = artifact; - } - compareSignature(signature) { - return core_1.crypto.bufferEqual(signature, this.signature); - } - compareDigest(digest) { - return core_1.crypto.bufferEqual(digest, this.messageDigest); - } - verifySignature(key) { - return core_1.crypto.verify(this.artifact, key, this.signature); - } -} -exports.MessageSignatureContent = MessageSignatureContent; diff --git a/node_modules/@sigstore/verify/dist/error.js b/node_modules/@sigstore/verify/dist/error.js deleted file mode 100644 index 6cb1cd4121343..0000000000000 --- a/node_modules/@sigstore/verify/dist/error.js +++ /dev/null @@ -1,32 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.PolicyError = exports.VerificationError = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -class BaseError extends Error { - constructor({ code, message, cause, }) { - super(message); - this.code = code; - this.cause = cause; - this.name = this.constructor.name; - } -} -class VerificationError extends BaseError { -} -exports.VerificationError = VerificationError; -class PolicyError extends BaseError { -} -exports.PolicyError = PolicyError; diff --git a/node_modules/@sigstore/verify/dist/index.js b/node_modules/@sigstore/verify/dist/index.js deleted file mode 100644 index 3222876fcd68b..0000000000000 --- a/node_modules/@sigstore/verify/dist/index.js +++ /dev/null @@ -1,28 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Verifier = exports.toTrustMaterial = exports.VerificationError = exports.PolicyError = exports.toSignedEntity = void 0; -/* istanbul ignore file */ -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -var bundle_1 = require("./bundle"); -Object.defineProperty(exports, "toSignedEntity", { enumerable: true, get: function () { return bundle_1.toSignedEntity; } }); -var error_1 = require("./error"); -Object.defineProperty(exports, "PolicyError", { enumerable: true, get: function () { return error_1.PolicyError; } }); -Object.defineProperty(exports, "VerificationError", { enumerable: true, get: function () { return error_1.VerificationError; } }); -var trust_1 = require("./trust"); -Object.defineProperty(exports, "toTrustMaterial", { enumerable: true, get: function () { return trust_1.toTrustMaterial; } }); -var verifier_1 = require("./verifier"); -Object.defineProperty(exports, "Verifier", { enumerable: true, get: function () { return verifier_1.Verifier; } }); diff --git a/node_modules/@sigstore/verify/dist/key/certificate.js b/node_modules/@sigstore/verify/dist/key/certificate.js deleted file mode 100644 index c9140dd98d58a..0000000000000 --- a/node_modules/@sigstore/verify/dist/key/certificate.js +++ /dev/null @@ -1,205 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.CertificateChainVerifier = exports.verifyCertificateChain = void 0; -const error_1 = require("../error"); -const trust_1 = require("../trust"); -function verifyCertificateChain(leaf, certificateAuthorities) { - // Filter list of trusted CAs to those which are valid for the given - // leaf certificate. - const cas = (0, trust_1.filterCertAuthorities)(certificateAuthorities, { - start: leaf.notBefore, - end: leaf.notAfter, - }); - /* eslint-disable-next-line @typescript-eslint/no-explicit-any */ - let error; - for (const ca of cas) { - try { - const verifier = new CertificateChainVerifier({ - trustedCerts: ca.certChain, - untrustedCert: leaf, - }); - return verifier.verify(); - } - catch (err) { - error = err; - } - } - // If we failed to verify the certificate chain for all of the trusted - // CAs, throw the last error we encountered. - throw new error_1.VerificationError({ - code: 'CERTIFICATE_ERROR', - message: 'Failed to verify certificate chain', - cause: error, - }); -} -exports.verifyCertificateChain = verifyCertificateChain; -class CertificateChainVerifier { - constructor(opts) { - this.untrustedCert = opts.untrustedCert; - this.trustedCerts = opts.trustedCerts; - this.localCerts = dedupeCertificates([ - ...opts.trustedCerts, - opts.untrustedCert, - ]); - } - verify() { - // Construct certificate path from leaf to root - const certificatePath = this.sort(); - // Perform validation checks on each certificate in the path - this.checkPath(certificatePath); - // Return verified certificate path - return certificatePath; - } - sort() { - const leafCert = this.untrustedCert; - // Construct all possible paths from the leaf - let paths = this.buildPaths(leafCert); - // Filter for paths which contain a trusted certificate - paths = paths.filter((path) => path.some((cert) => this.trustedCerts.includes(cert))); - if (paths.length === 0) { - throw new error_1.VerificationError({ - code: 'CERTIFICATE_ERROR', - message: 'no trusted certificate path found', - }); - } - // Find the shortest of possible paths - /* istanbul ignore next */ - const path = paths.reduce((prev, curr) => prev.length < curr.length ? prev : curr); - // Construct chain from shortest path - // Removes the last certificate in the path, which will be a second copy - // of the root certificate given that the root is self-signed. 
- return [leafCert, ...path].slice(0, -1); - } - // Recursively build all possible paths from the leaf to the root - buildPaths(certificate) { - const paths = []; - const issuers = this.findIssuer(certificate); - if (issuers.length === 0) { - throw new error_1.VerificationError({ - code: 'CERTIFICATE_ERROR', - message: 'no valid certificate path found', - }); - } - for (let i = 0; i < issuers.length; i++) { - const issuer = issuers[i]; - // Base case - issuer is self - if (issuer.equals(certificate)) { - paths.push([certificate]); - continue; - } - // Recursively build path for the issuer - const subPaths = this.buildPaths(issuer); - // Construct paths by appending the issuer to each subpath - for (let j = 0; j < subPaths.length; j++) { - paths.push([issuer, ...subPaths[j]]); - } - } - return paths; - } - // Return all possible issuers for the given certificate - findIssuer(certificate) { - let issuers = []; - let keyIdentifier; - // Exit early if the certificate is self-signed - if (certificate.subject.equals(certificate.issuer)) { - if (certificate.verify()) { - return [certificate]; - } - } - // If the certificate has an authority key identifier, use that - // to find the issuer - if (certificate.extAuthorityKeyID) { - keyIdentifier = certificate.extAuthorityKeyID.keyIdentifier; - // TODO: Add support for authorityCertIssuer/authorityCertSerialNumber - // though Fulcio doesn't appear to use these - } - // Find possible issuers by comparing the authorityKeyID/subjectKeyID - // or issuer/subject. Potential issuers are added to the result array. - this.localCerts.forEach((possibleIssuer) => { - if (keyIdentifier) { - if (possibleIssuer.extSubjectKeyID) { - if (possibleIssuer.extSubjectKeyID.keyIdentifier.equals(keyIdentifier)) { - issuers.push(possibleIssuer); - } - return; - } - } - // Fallback to comparing certificate issuer and subject if - // subjectKey/authorityKey extensions are not present - if (possibleIssuer.subject.equals(certificate.issuer)) { - issuers.push(possibleIssuer); - } - }); - // Remove any issuers which fail to verify the certificate - issuers = issuers.filter((issuer) => { - try { - return certificate.verify(issuer); - } - catch (ex) { - /* istanbul ignore next - should never error */ - return false; - } - }); - return issuers; - } - checkPath(path) { - /* istanbul ignore if */ - if (path.length < 1) { - throw new error_1.VerificationError({ - code: 'CERTIFICATE_ERROR', - message: 'certificate chain must contain at least one certificate', - }); - } - // Ensure that all certificates beyond the leaf are CAs - const validCAs = path.slice(1).every((cert) => cert.isCA); - if (!validCAs) { - throw new error_1.VerificationError({ - code: 'CERTIFICATE_ERROR', - message: 'intermediate certificate is not a CA', - }); - } - // Certificate's issuer must match the subject of the next certificate - // in the chain - for (let i = path.length - 2; i >= 0; i--) { - /* istanbul ignore if */ - if (!path[i].issuer.equals(path[i + 1].subject)) { - throw new error_1.VerificationError({ - code: 'CERTIFICATE_ERROR', - message: 'incorrect certificate name chaining', - }); - } - } - // Check pathlength constraints - for (let i = 0; i < path.length; i++) { - const cert = path[i]; - // If the certificate is a CA, check the path length - if (cert.extBasicConstraints?.isCA) { - const pathLength = cert.extBasicConstraints.pathLenConstraint; - // The path length, if set, indicates how many intermediate - // certificates (NOT including the leaf) are allowed to follow. 
The - // pathLength constraint of any intermediate CA certificate MUST be - // greater than or equal to it's own depth in the chain (with an - // adjustment for the leaf certificate) - if (pathLength !== undefined && pathLength < i - 1) { - throw new error_1.VerificationError({ - code: 'CERTIFICATE_ERROR', - message: 'path length constraint exceeded', - }); - } - } - } - } -} -exports.CertificateChainVerifier = CertificateChainVerifier; -// Remove duplicate certificates from the array -function dedupeCertificates(certs) { - for (let i = 0; i < certs.length; i++) { - for (let j = i + 1; j < certs.length; j++) { - if (certs[i].equals(certs[j])) { - certs.splice(j, 1); - j--; - } - } - } - return certs; -} diff --git a/node_modules/@sigstore/verify/dist/key/index.js b/node_modules/@sigstore/verify/dist/key/index.js deleted file mode 100644 index 682a306803a99..0000000000000 --- a/node_modules/@sigstore/verify/dist/key/index.js +++ /dev/null @@ -1,72 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.verifyCertificate = exports.verifyPublicKey = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const core_1 = require("@sigstore/core"); -const error_1 = require("../error"); -const certificate_1 = require("./certificate"); -const sct_1 = require("./sct"); -const OID_FULCIO_ISSUER_V1 = '1.3.6.1.4.1.57264.1.1'; -const OID_FULCIO_ISSUER_V2 = '1.3.6.1.4.1.57264.1.8'; -function verifyPublicKey(hint, timestamps, trustMaterial) { - const key = trustMaterial.publicKey(hint); - timestamps.forEach((timestamp) => { - if (!key.validFor(timestamp)) { - throw new error_1.VerificationError({ - code: 'PUBLIC_KEY_ERROR', - message: `Public key is not valid for timestamp: ${timestamp.toISOString()}`, - }); - } - }); - return { key: key.publicKey }; -} -exports.verifyPublicKey = verifyPublicKey; -function verifyCertificate(leaf, timestamps, trustMaterial) { - // Check that leaf certificate chains to a trusted CA - const path = (0, certificate_1.verifyCertificateChain)(leaf, trustMaterial.certificateAuthorities); - // Check that ALL certificates are valid for ALL of the timestamps - const validForDate = timestamps.every((timestamp) => path.every((cert) => cert.validForDate(timestamp))); - if (!validForDate) { - throw new error_1.VerificationError({ - code: 'CERTIFICATE_ERROR', - message: 'certificate is not valid or expired at the specified date', - }); - } - return { - scts: (0, sct_1.verifySCTs)(path[0], path[1], trustMaterial.ctlogs), - signer: getSigner(path[0]), - }; -} -exports.verifyCertificate = verifyCertificate; -function getSigner(cert) { - let issuer; - const issuerExtension = cert.extension(OID_FULCIO_ISSUER_V2); - if (issuerExtension) { - issuer = issuerExtension.valueObj.subs?.[0]?.value.toString('ascii'); - } - else { - issuer = cert.extension(OID_FULCIO_ISSUER_V1)?.value.toString('ascii'); - } - const identity = { - extensions: { issuer }, - subjectAlternativeName: cert.subjectAltName, - }; - return { - key: 
core_1.crypto.createPublicKey(cert.publicKey), - identity, - }; -} diff --git a/node_modules/@sigstore/verify/dist/key/sct.js b/node_modules/@sigstore/verify/dist/key/sct.js deleted file mode 100644 index aea412840e103..0000000000000 --- a/node_modules/@sigstore/verify/dist/key/sct.js +++ /dev/null @@ -1,79 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.verifySCTs = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const core_1 = require("@sigstore/core"); -const error_1 = require("../error"); -const trust_1 = require("../trust"); -function verifySCTs(cert, issuer, ctlogs) { - let extSCT; - // Verifying the SCT requires that we remove the SCT extension and - // re-encode the TBS structure to DER -- this value is part of the data - // over which the signature is calculated. Since this is a destructive action - // we create a copy of the certificate so we can remove the SCT extension - // without affecting the original certificate. - const clone = cert.clone(); - // Intentionally not using the findExtension method here because we want to - // remove the the SCT extension from the certificate before calculating the - // PreCert structure - for (let i = 0; i < clone.extensions.length; i++) { - const ext = clone.extensions[i]; - if (ext.subs[0].toOID() === core_1.EXTENSION_OID_SCT) { - extSCT = new core_1.X509SCTExtension(ext); - // Remove the extension from the certificate - clone.extensions.splice(i, 1); - break; - } - } - // No SCT extension found to verify - if (!extSCT) { - return []; - } - // Found an SCT extension but it has no SCTs - /* istanbul ignore if -- too difficult to fabricate test case for this */ - if (extSCT.signedCertificateTimestamps.length === 0) { - return []; - } - // Construct the PreCert structure - // https://www.rfc-editor.org/rfc/rfc6962#section-3.2 - const preCert = new core_1.ByteStream(); - // Calculate hash of the issuer's public key - const issuerId = core_1.crypto.hash(issuer.publicKey); - preCert.appendView(issuerId); - // Re-encodes the certificate to DER after removing the SCT extension - const tbs = clone.tbsCertificate.toDER(); - preCert.appendUint24(tbs.length); - preCert.appendView(tbs); - // Calculate and return the verification results for each SCT - return extSCT.signedCertificateTimestamps.map((sct) => { - // Find the ctlog instance that corresponds to the SCT's logID - const validCTLogs = (0, trust_1.filterTLogAuthorities)(ctlogs, { - logID: sct.logID, - targetDate: sct.datetime, - }); - // See if the SCT is valid for any of the CT logs - const verified = validCTLogs.some((log) => sct.verify(preCert.buffer, log.publicKey)); - if (!verified) { - throw new error_1.VerificationError({ - code: 'CERTIFICATE_ERROR', - message: 'SCT verification failed', - }); - } - return sct.logID; - }); -} -exports.verifySCTs = verifySCTs; diff --git a/node_modules/@sigstore/verify/dist/policy.js b/node_modules/@sigstore/verify/dist/policy.js deleted file mode 100644 index 
731e5c8332847..0000000000000 --- a/node_modules/@sigstore/verify/dist/policy.js +++ /dev/null @@ -1,25 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.verifyExtensions = exports.verifySubjectAlternativeName = void 0; -const error_1 = require("./error"); -function verifySubjectAlternativeName(policyIdentity, signerIdentity) { - if (signerIdentity === undefined || !signerIdentity.match(policyIdentity)) { - throw new error_1.PolicyError({ - code: 'UNTRUSTED_SIGNER_ERROR', - message: `certificate identity error - expected ${policyIdentity}, got ${signerIdentity}`, - }); - } -} -exports.verifySubjectAlternativeName = verifySubjectAlternativeName; -function verifyExtensions(policyExtensions, signerExtensions = {}) { - let key; - for (key in policyExtensions) { - if (signerExtensions[key] !== policyExtensions[key]) { - throw new error_1.PolicyError({ - code: 'UNTRUSTED_SIGNER_ERROR', - message: `invalid certificate extension - expected ${key}=${policyExtensions[key]}, got ${key}=${signerExtensions[key]}`, - }); - } - } -} -exports.verifyExtensions = verifyExtensions; diff --git a/node_modules/@sigstore/verify/dist/shared.types.js b/node_modules/@sigstore/verify/dist/shared.types.js deleted file mode 100644 index c8ad2e549bdc6..0000000000000 --- a/node_modules/@sigstore/verify/dist/shared.types.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js b/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js deleted file mode 100644 index 04a87383f0fd1..0000000000000 --- a/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js +++ /dev/null @@ -1,158 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.verifyCheckpoint = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const core_1 = require("@sigstore/core"); -const error_1 = require("../error"); -const trust_1 = require("../trust"); -// Separator between the note and the signatures in a checkpoint -const CHECKPOINT_SEPARATOR = '\n\n'; -// Checkpoint signatures are of the following form: -// "– \n" -// where: -// - the prefix is an emdash (U+2014). -// - gives a human-readable representation of the signing ID. -// - is the first 4 bytes of the SHA256 hash of the -// associated public key followed by the signature bytes. -const SIGNATURE_REGEX = /\u2014 (\S+) (\S+)\n/g; -// Verifies the checkpoint value in the given tlog entry. There are two steps -// to the verification: -// 1. Verify that all signatures in the checkpoint can be verified against a -// trusted public key -// 2. 
Verify that the root hash in the checkpoint matches the root hash in the -// inclusion proof -// See: https://github.com/transparency-dev/formats/blob/main/log/README.md -function verifyCheckpoint(entry, tlogs) { - // Filter tlog instances to just those which were valid at the time of the - // entry - const validTLogs = (0, trust_1.filterTLogAuthorities)(tlogs, { - targetDate: new Date(Number(entry.integratedTime) * 1000), - }); - const inclusionProof = entry.inclusionProof; - const signedNote = SignedNote.fromString(inclusionProof.checkpoint.envelope); - const checkpoint = LogCheckpoint.fromString(signedNote.note); - // Verify that the signatures in the checkpoint are all valid - if (!verifySignedNote(signedNote, validTLogs)) { - throw new error_1.VerificationError({ - code: 'TLOG_INCLUSION_PROOF_ERROR', - message: 'invalid checkpoint signature', - }); - } - // Verify that the root hash from the checkpoint matches the root hash in the - // inclusion proof - if (!core_1.crypto.bufferEqual(checkpoint.logHash, inclusionProof.rootHash)) { - throw new error_1.VerificationError({ - code: 'TLOG_INCLUSION_PROOF_ERROR', - message: 'root hash mismatch', - }); - } -} -exports.verifyCheckpoint = verifyCheckpoint; -// Verifies the signatures in the SignedNote. For each signature, the -// corresponding transparency log is looked up by the key hint and the -// signature is verified against the public key in the transparency log. -// Throws an error if any of the signatures are invalid. -function verifySignedNote(signedNote, tlogs) { - const data = Buffer.from(signedNote.note, 'utf-8'); - return signedNote.signatures.every((signature) => { - // Find the transparency log instance with the matching key hint - const tlog = tlogs.find((tlog) => core_1.crypto.bufferEqual(tlog.logID.subarray(0, 4), signature.keyHint)); - if (!tlog) { - return false; - } - return core_1.crypto.verify(data, tlog.publicKey, signature.signature); - }); -} -// SignedNote represents a signed note from a transparency log checkpoint. Consists -// of a body (or note) and one more signatures calculated over the body. See -// https://github.com/transparency-dev/formats/blob/main/log/README.md#signed-envelope -class SignedNote { - constructor(note, signatures) { - this.note = note; - this.signatures = signatures; - } - // Deserialize a SignedNote from a string - static fromString(envelope) { - if (!envelope.includes(CHECKPOINT_SEPARATOR)) { - throw new error_1.VerificationError({ - code: 'TLOG_INCLUSION_PROOF_ERROR', - message: 'missing checkpoint separator', - }); - } - // Split the note into the header and the data portions at the separator - const split = envelope.indexOf(CHECKPOINT_SEPARATOR); - const header = envelope.slice(0, split + 1); - const data = envelope.slice(split + CHECKPOINT_SEPARATOR.length); - // Find all the signature lines in the data portion - const matches = data.matchAll(SIGNATURE_REGEX); - // Parse each of the matched signature lines into the name and signature. - // The first four bytes of the signature are the key hint (should match the - // first four bytes of the log ID), and the rest is the signature itself. 
- const signatures = Array.from(matches, (match) => { - const [, name, signature] = match; - const sigBytes = Buffer.from(signature, 'base64'); - if (sigBytes.length < 5) { - throw new error_1.VerificationError({ - code: 'TLOG_INCLUSION_PROOF_ERROR', - message: 'malformed checkpoint signature', - }); - } - return { - name, - keyHint: sigBytes.subarray(0, 4), - signature: sigBytes.subarray(4), - }; - }); - if (signatures.length === 0) { - throw new error_1.VerificationError({ - code: 'TLOG_INCLUSION_PROOF_ERROR', - message: 'no signatures found in checkpoint', - }); - } - return new SignedNote(header, signatures); - } -} -// LogCheckpoint represents a transparency log checkpoint. Consists of the -// following: -// - origin: the name of the transparency log -// - logSize: the size of the log at the time of the checkpoint -// - logHash: the root hash of the log at the time of the checkpoint -// - rest: the rest of the checkpoint body, which is a list of log entries -// See: -// https://github.com/transparency-dev/formats/blob/main/log/README.md#checkpoint-body -class LogCheckpoint { - constructor(origin, logSize, logHash, rest) { - this.origin = origin; - this.logSize = logSize; - this.logHash = logHash; - this.rest = rest; - } - static fromString(note) { - const lines = note.trimEnd().split('\n'); - if (lines.length < 3) { - throw new error_1.VerificationError({ - code: 'TLOG_INCLUSION_PROOF_ERROR', - message: 'too few lines in checkpoint header', - }); - } - const origin = lines[0]; - const logSize = BigInt(lines[1]); - const rootHash = Buffer.from(lines[2], 'base64'); - const rest = lines.slice(3); - return new LogCheckpoint(origin, logSize, rootHash, rest); - } -} diff --git a/node_modules/@sigstore/verify/dist/timestamp/index.js b/node_modules/@sigstore/verify/dist/timestamp/index.js deleted file mode 100644 index 0da554f648d25..0000000000000 --- a/node_modules/@sigstore/verify/dist/timestamp/index.js +++ /dev/null @@ -1,47 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.verifyTLogTimestamp = exports.verifyTSATimestamp = void 0; -const error_1 = require("../error"); -const checkpoint_1 = require("./checkpoint"); -const merkle_1 = require("./merkle"); -const set_1 = require("./set"); -const tsa_1 = require("./tsa"); -function verifyTSATimestamp(timestamp, data, timestampAuthorities) { - (0, tsa_1.verifyRFC3161Timestamp)(timestamp, data, timestampAuthorities); - return { - type: 'timestamp-authority', - logID: timestamp.signerSerialNumber, - timestamp: timestamp.signingTime, - }; -} -exports.verifyTSATimestamp = verifyTSATimestamp; -function verifyTLogTimestamp(entry, tlogAuthorities) { - let inclusionVerified = false; - if (isTLogEntryWithInclusionPromise(entry)) { - (0, set_1.verifyTLogSET)(entry, tlogAuthorities); - inclusionVerified = true; - } - if (isTLogEntryWithInclusionProof(entry)) { - (0, merkle_1.verifyMerkleInclusion)(entry); - (0, checkpoint_1.verifyCheckpoint)(entry, tlogAuthorities); - inclusionVerified = true; - } - if (!inclusionVerified) { - throw new error_1.VerificationError({ - code: 'TLOG_MISSING_INCLUSION_ERROR', - message: 'inclusion could not be verified', - }); - } - return { - type: 'transparency-log', - logID: entry.logId.keyId, - timestamp: new Date(Number(entry.integratedTime) * 1000), - }; -} -exports.verifyTLogTimestamp = verifyTLogTimestamp; -function isTLogEntryWithInclusionPromise(entry) { - return entry.inclusionPromise !== undefined; -} -function isTLogEntryWithInclusionProof(entry) { - return 
entry.inclusionProof !== undefined; -} diff --git a/node_modules/@sigstore/verify/dist/timestamp/merkle.js b/node_modules/@sigstore/verify/dist/timestamp/merkle.js deleted file mode 100644 index 9895d01b7abc0..0000000000000 --- a/node_modules/@sigstore/verify/dist/timestamp/merkle.js +++ /dev/null @@ -1,105 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.verifyMerkleInclusion = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const core_1 = require("@sigstore/core"); -const error_1 = require("../error"); -const RFC6962_LEAF_HASH_PREFIX = Buffer.from([0x00]); -const RFC6962_NODE_HASH_PREFIX = Buffer.from([0x01]); -function verifyMerkleInclusion(entry) { - const inclusionProof = entry.inclusionProof; - const logIndex = BigInt(inclusionProof.logIndex); - const treeSize = BigInt(inclusionProof.treeSize); - if (logIndex < 0n || logIndex >= treeSize) { - throw new error_1.VerificationError({ - code: 'TLOG_INCLUSION_PROOF_ERROR', - message: `invalid index: ${logIndex}`, - }); - } - // Figure out which subset of hashes corresponds to the inner and border - // nodes - const { inner, border } = decompInclProof(logIndex, treeSize); - if (inclusionProof.hashes.length !== inner + border) { - throw new error_1.VerificationError({ - code: 'TLOG_INCLUSION_PROOF_ERROR', - message: 'invalid hash count', - }); - } - const innerHashes = inclusionProof.hashes.slice(0, inner); - const borderHashes = inclusionProof.hashes.slice(inner); - // The entry's hash is the leaf hash - const leafHash = hashLeaf(entry.canonicalizedBody); - // Chain the hashes belonging to the inner and border portions - const calculatedHash = chainBorderRight(chainInner(leafHash, innerHashes, logIndex), borderHashes); - // Calculated hash should match the root hash in the inclusion proof - if (!core_1.crypto.bufferEqual(calculatedHash, inclusionProof.rootHash)) { - throw new error_1.VerificationError({ - code: 'TLOG_INCLUSION_PROOF_ERROR', - message: 'calculated root hash does not match inclusion proof', - }); - } -} -exports.verifyMerkleInclusion = verifyMerkleInclusion; -// Breaks down inclusion proof for a leaf at the specified index in a tree of -// the specified size. The split point is where paths to the index leaf and -// the (size - 1) leaf diverge. Returns lengths of the bottom and upper proof -// parts. -function decompInclProof(index, size) { - const inner = innerProofSize(index, size); - const border = onesCount(index >> BigInt(inner)); - return { inner, border }; -} -// Computes a subtree hash for a node on or below the tree's right border. -// Assumes the provided proof hashes are ordered from lower to higher levels -// and seed is the initial hash of the node specified by the index. 
-function chainInner(seed, hashes, index) { - return hashes.reduce((acc, h, i) => { - if ((index >> BigInt(i)) & BigInt(1)) { - return hashChildren(h, acc); - } - else { - return hashChildren(acc, h); - } - }, seed); -} -// Computes a subtree hash for nodes along the tree's right border. -function chainBorderRight(seed, hashes) { - return hashes.reduce((acc, h) => hashChildren(h, acc), seed); -} -function innerProofSize(index, size) { - return bitLength(index ^ (size - BigInt(1))); -} -// Counts the number of ones in the binary representation of the given number. -// https://en.wikipedia.org/wiki/Hamming_weight -function onesCount(num) { - return num.toString(2).split('1').length - 1; -} -// Returns the number of bits necessary to represent an integer in binary. -function bitLength(n) { - if (n === 0n) { - return 0; - } - return n.toString(2).length; -} -// Hashing logic according to RFC6962. -// https://datatracker.ietf.org/doc/html/rfc6962#section-2 -function hashChildren(left, right) { - return core_1.crypto.hash(RFC6962_NODE_HASH_PREFIX, left, right); -} -function hashLeaf(leaf) { - return core_1.crypto.hash(RFC6962_LEAF_HASH_PREFIX, leaf); -} diff --git a/node_modules/@sigstore/verify/dist/timestamp/set.js b/node_modules/@sigstore/verify/dist/timestamp/set.js deleted file mode 100644 index a6357c06999cb..0000000000000 --- a/node_modules/@sigstore/verify/dist/timestamp/set.js +++ /dev/null @@ -1,61 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.verifyTLogSET = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const core_1 = require("@sigstore/core"); -const error_1 = require("../error"); -const trust_1 = require("../trust"); -// Verifies the SET for the given entry against the list of trusted -// transparency logs. Returns true if the SET can be verified against at least -// one of the trusted logs; otherwise, returns false. 
-function verifyTLogSET(entry, tlogs) { - // Filter the list of tlog instances to only those which might be able to - // verify the SET - const validTLogs = (0, trust_1.filterTLogAuthorities)(tlogs, { - logID: entry.logId.keyId, - targetDate: new Date(Number(entry.integratedTime) * 1000), - }); - // Check to see if we can verify the SET against any of the valid tlogs - const verified = validTLogs.some((tlog) => { - // Re-create the original Rekor verification payload - const payload = toVerificationPayload(entry); - // Canonicalize the payload and turn into a buffer for verification - const data = Buffer.from(core_1.json.canonicalize(payload), 'utf8'); - // Extract the SET from the tlog entry - const signature = entry.inclusionPromise.signedEntryTimestamp; - return core_1.crypto.verify(data, tlog.publicKey, signature); - }); - if (!verified) { - throw new error_1.VerificationError({ - code: 'TLOG_INCLUSION_PROMISE_ERROR', - message: 'inclusion promise could not be verified', - }); - } -} -exports.verifyTLogSET = verifyTLogSET; -// Returns a properly formatted "VerificationPayload" for one of the -// transaction log entires in the given bundle which can be used for SET -// verification. -function toVerificationPayload(entry) { - const { integratedTime, logIndex, logId, canonicalizedBody } = entry; - return { - body: canonicalizedBody.toString('base64'), - integratedTime: Number(integratedTime), - logIndex: Number(logIndex), - logID: logId.keyId.toString('hex'), - }; -} diff --git a/node_modules/@sigstore/verify/dist/timestamp/tsa.js b/node_modules/@sigstore/verify/dist/timestamp/tsa.js deleted file mode 100644 index 7b095bc3a7f90..0000000000000 --- a/node_modules/@sigstore/verify/dist/timestamp/tsa.js +++ /dev/null @@ -1,74 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.verifyRFC3161Timestamp = void 0; -const core_1 = require("@sigstore/core"); -const error_1 = require("../error"); -const certificate_1 = require("../key/certificate"); -const trust_1 = require("../trust"); -function verifyRFC3161Timestamp(timestamp, data, timestampAuthorities) { - const signingTime = timestamp.signingTime; - // Filter for CAs which were valid at the time of signing - timestampAuthorities = (0, trust_1.filterCertAuthorities)(timestampAuthorities, { - start: signingTime, - end: signingTime, - }); - // Filter for CAs which match serial and issuer embedded in the timestamp - timestampAuthorities = filterCAsBySerialAndIssuer(timestampAuthorities, { - serialNumber: timestamp.signerSerialNumber, - issuer: timestamp.signerIssuer, - }); - // Check that we can verify the timestamp with AT LEAST ONE of the remaining - // CAs - const verified = timestampAuthorities.some((ca) => { - try { - verifyTimestampForCA(timestamp, data, ca); - return true; - } - catch (e) { - return false; - } - }); - if (!verified) { - throw new error_1.VerificationError({ - code: 'TIMESTAMP_ERROR', - message: 'timestamp could not be verified', - }); - } -} -exports.verifyRFC3161Timestamp = verifyRFC3161Timestamp; -function verifyTimestampForCA(timestamp, data, ca) { - const [leaf, ...cas] = ca.certChain; - const signingKey = core_1.crypto.createPublicKey(leaf.publicKey); - const signingTime = timestamp.signingTime; - // Verify the certificate chain for the provided CA - try { - new certificate_1.CertificateChainVerifier({ - untrustedCert: leaf, - trustedCerts: cas, - }).verify(); - } - catch (e) { - throw new error_1.VerificationError({ - code: 'TIMESTAMP_ERROR', - message: 'invalid certificate 
chain', - }); - } - // Check that all of the CA certs were valid at the time of signing - const validAtSigningTime = ca.certChain.every((cert) => cert.validForDate(signingTime)); - if (!validAtSigningTime) { - throw new error_1.VerificationError({ - code: 'TIMESTAMP_ERROR', - message: 'timestamp was signed with an expired certificate', - }); - } - // Check that the signing certificate's key can be used to verify the - // timestamp signature. - timestamp.verify(data, signingKey); -} -// Filters the list of CAs to those which have a leaf signing certificate which -// matches the given serial number and issuer. -function filterCAsBySerialAndIssuer(timestampAuthorities, criteria) { - return timestampAuthorities.filter((ca) => ca.certChain.length > 0 && - core_1.crypto.bufferEqual(ca.certChain[0].serialNumber, criteria.serialNumber) && - core_1.crypto.bufferEqual(ca.certChain[0].issuer, criteria.issuer)); -} diff --git a/node_modules/@sigstore/verify/dist/tlog/dsse.js b/node_modules/@sigstore/verify/dist/tlog/dsse.js deleted file mode 100644 index bf430e61dde56..0000000000000 --- a/node_modules/@sigstore/verify/dist/tlog/dsse.js +++ /dev/null @@ -1,58 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.verifyDSSETLogBody = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const error_1 = require("../error"); -// Compare the given intoto tlog entry to the given bundle -function verifyDSSETLogBody(tlogEntry, content) { - switch (tlogEntry.apiVersion) { - case '0.0.1': - return verifyDSSE001TLogBody(tlogEntry, content); - default: - throw new error_1.VerificationError({ - code: 'TLOG_BODY_ERROR', - message: `unsupported dsse version: ${tlogEntry.apiVersion}`, - }); - } -} -exports.verifyDSSETLogBody = verifyDSSETLogBody; -// Compare the given dsse v0.0.1 tlog entry to the given DSSE envelope. 
-function verifyDSSE001TLogBody(tlogEntry, content) { - // Ensure the bundle's DSSE only contains a single signature - if (tlogEntry.spec.signatures?.length !== 1) { - throw new error_1.VerificationError({ - code: 'TLOG_BODY_ERROR', - message: 'signature count mismatch', - }); - } - const tlogSig = tlogEntry.spec.signatures[0].signature; - // Ensure that the signature in the bundle's DSSE matches tlog entry - if (!content.compareSignature(Buffer.from(tlogSig, 'base64'))) - throw new error_1.VerificationError({ - code: 'TLOG_BODY_ERROR', - message: 'tlog entry signature mismatch', - }); - // Ensure the digest of the bundle's DSSE payload matches the digest in the - // tlog entry - const tlogHash = tlogEntry.spec.payloadHash?.value || ''; - if (!content.compareDigest(Buffer.from(tlogHash, 'hex'))) { - throw new error_1.VerificationError({ - code: 'TLOG_BODY_ERROR', - message: 'DSSE payload hash mismatch', - }); - } -} diff --git a/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js b/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js deleted file mode 100644 index d1758858f030d..0000000000000 --- a/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js +++ /dev/null @@ -1,52 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.verifyHashedRekordTLogBody = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -const error_1 = require("../error"); -// Compare the given hashedrekord tlog entry to the given bundle -function verifyHashedRekordTLogBody(tlogEntry, content) { - switch (tlogEntry.apiVersion) { - case '0.0.1': - return verifyHashedrekord001TLogBody(tlogEntry, content); - default: - throw new error_1.VerificationError({ - code: 'TLOG_BODY_ERROR', - message: `unsupported hashedrekord version: ${tlogEntry.apiVersion}`, - }); - } -} -exports.verifyHashedRekordTLogBody = verifyHashedRekordTLogBody; -// Compare the given hashedrekord v0.0.1 tlog entry to the given message -// signature -function verifyHashedrekord001TLogBody(tlogEntry, content) { - // Ensure that the bundles message signature matches the tlog entry - const tlogSig = tlogEntry.spec.signature.content || ''; - if (!content.compareSignature(Buffer.from(tlogSig, 'base64'))) { - throw new error_1.VerificationError({ - code: 'TLOG_BODY_ERROR', - message: 'signature mismatch', - }); - } - // Ensure that the bundle's message digest matches the tlog entry - const tlogDigest = tlogEntry.spec.data.hash?.value || ''; - if (!content.compareDigest(Buffer.from(tlogDigest, 'hex'))) { - throw new error_1.VerificationError({ - code: 'TLOG_BODY_ERROR', - message: 'digest mismatch', - }); - } -} diff --git a/node_modules/@sigstore/verify/dist/tlog/index.js b/node_modules/@sigstore/verify/dist/tlog/index.js deleted file mode 100644 index adfc70ed51ad0..0000000000000 --- a/node_modules/@sigstore/verify/dist/tlog/index.js +++ /dev/null @@ -1,48 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.verifyTLogBody = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const error_1 = require("../error"); -const dsse_1 = require("./dsse"); -const hashedrekord_1 = require("./hashedrekord"); -const intoto_1 = require("./intoto"); -// Verifies that the given tlog entry matches the supplied signature content. 
-function verifyTLogBody(entry, sigContent) { - const { kind, version } = entry.kindVersion; - const body = JSON.parse(entry.canonicalizedBody.toString('utf8')); - if (kind !== body.kind || version !== body.apiVersion) { - throw new error_1.VerificationError({ - code: 'TLOG_BODY_ERROR', - message: `kind/version mismatch - expected: ${kind}/${version}, received: ${body.kind}/${body.apiVersion}`, - }); - } - switch (body.kind) { - case 'dsse': - return (0, dsse_1.verifyDSSETLogBody)(body, sigContent); - case 'intoto': - return (0, intoto_1.verifyIntotoTLogBody)(body, sigContent); - case 'hashedrekord': - return (0, hashedrekord_1.verifyHashedRekordTLogBody)(body, sigContent); - /* istanbul ignore next */ - default: - throw new error_1.VerificationError({ - code: 'TLOG_BODY_ERROR', - message: `unsupported kind: ${kind}`, - }); - } -} -exports.verifyTLogBody = verifyTLogBody; diff --git a/node_modules/@sigstore/verify/dist/tlog/intoto.js b/node_modules/@sigstore/verify/dist/tlog/intoto.js deleted file mode 100644 index 74c7f50d763e1..0000000000000 --- a/node_modules/@sigstore/verify/dist/tlog/intoto.js +++ /dev/null @@ -1,63 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.verifyIntotoTLogBody = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const error_1 = require("../error"); -// Compare the given intoto tlog entry to the given bundle -function verifyIntotoTLogBody(tlogEntry, content) { - switch (tlogEntry.apiVersion) { - case '0.0.2': - return verifyIntoto002TLogBody(tlogEntry, content); - default: - throw new error_1.VerificationError({ - code: 'TLOG_BODY_ERROR', - message: `unsupported intoto version: ${tlogEntry.apiVersion}`, - }); - } -} -exports.verifyIntotoTLogBody = verifyIntotoTLogBody; -// Compare the given intoto v0.0.2 tlog entry to the given DSSE envelope. 
-function verifyIntoto002TLogBody(tlogEntry, content) { - // Ensure the bundle's DSSE contains a single signature - if (tlogEntry.spec.content.envelope.signatures?.length !== 1) { - throw new error_1.VerificationError({ - code: 'TLOG_BODY_ERROR', - message: 'signature count mismatch', - }); - } - // Signature is double-base64-encoded in the tlog entry - const tlogSig = base64Decode(tlogEntry.spec.content.envelope.signatures[0].sig); - // Ensure that the signature in the bundle's DSSE matches tlog entry - if (!content.compareSignature(Buffer.from(tlogSig, 'base64'))) { - throw new error_1.VerificationError({ - code: 'TLOG_BODY_ERROR', - message: 'tlog entry signature mismatch', - }); - } - // Ensure the digest of the bundle's DSSE payload matches the digest in the - // tlog entry - const tlogHash = tlogEntry.spec.content.payloadHash?.value || ''; - if (!content.compareDigest(Buffer.from(tlogHash, 'hex'))) { - throw new error_1.VerificationError({ - code: 'TLOG_BODY_ERROR', - message: 'DSSE payload hash mismatch', - }); - } -} -function base64Decode(str) { - return Buffer.from(str, 'base64').toString('utf-8'); -} diff --git a/node_modules/@sigstore/verify/dist/trust/filter.js b/node_modules/@sigstore/verify/dist/trust/filter.js deleted file mode 100644 index c09d055913c4c..0000000000000 --- a/node_modules/@sigstore/verify/dist/trust/filter.js +++ /dev/null @@ -1,24 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.filterTLogAuthorities = exports.filterCertAuthorities = void 0; -function filterCertAuthorities(certAuthorities, criteria) { - return certAuthorities.filter((ca) => { - return (ca.validFor.start <= criteria.start && ca.validFor.end >= criteria.end); - }); -} -exports.filterCertAuthorities = filterCertAuthorities; -// Filter the list of tlog instances to only those which match the given log -// ID and have public keys which are valid for the given integrated time. -function filterTLogAuthorities(tlogAuthorities, criteria) { - return tlogAuthorities.filter((tlog) => { - // If we're filtering by log ID and the log IDs don't match, we can't use - // this tlog - if (criteria.logID && !tlog.logID.equals(criteria.logID)) { - return false; - } - // Check that the integrated time is within the validFor range - return (tlog.validFor.start <= criteria.targetDate && - criteria.targetDate <= tlog.validFor.end); - }); -} -exports.filterTLogAuthorities = filterTLogAuthorities; diff --git a/node_modules/@sigstore/verify/dist/trust/index.js b/node_modules/@sigstore/verify/dist/trust/index.js deleted file mode 100644 index 954de55841590..0000000000000 --- a/node_modules/@sigstore/verify/dist/trust/index.js +++ /dev/null @@ -1,84 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.toTrustMaterial = exports.filterTLogAuthorities = exports.filterCertAuthorities = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -const core_1 = require("@sigstore/core"); -const protobuf_specs_1 = require("@sigstore/protobuf-specs"); -const error_1 = require("../error"); -const BEGINNING_OF_TIME = new Date(0); -const END_OF_TIME = new Date(8640000000000000); -var filter_1 = require("./filter"); -Object.defineProperty(exports, "filterCertAuthorities", { enumerable: true, get: function () { return filter_1.filterCertAuthorities; } }); -Object.defineProperty(exports, "filterTLogAuthorities", { enumerable: true, get: function () { return filter_1.filterTLogAuthorities; } }); -function toTrustMaterial(root, keys) { - const keyFinder = typeof keys === 'function' ? keys : keyLocator(keys); - return { - certificateAuthorities: root.certificateAuthorities.map(createCertAuthority), - timestampAuthorities: root.timestampAuthorities.map(createCertAuthority), - tlogs: root.tlogs.map(createTLogAuthority), - ctlogs: root.ctlogs.map(createTLogAuthority), - publicKey: keyFinder, - }; -} -exports.toTrustMaterial = toTrustMaterial; -function createTLogAuthority(tlogInstance) { - const keyDetails = tlogInstance.publicKey.keyDetails; - const keyType = keyDetails === protobuf_specs_1.PublicKeyDetails.PKCS1_RSA_PKCS1V5 || - keyDetails === protobuf_specs_1.PublicKeyDetails.PKIX_RSA_PKCS1V5 || - keyDetails === protobuf_specs_1.PublicKeyDetails.PKIX_RSA_PKCS1V15_2048_SHA256 || - keyDetails === protobuf_specs_1.PublicKeyDetails.PKIX_RSA_PKCS1V15_3072_SHA256 || - keyDetails === protobuf_specs_1.PublicKeyDetails.PKIX_RSA_PKCS1V15_4096_SHA256 - ? 'pkcs1' - : 'spki'; - return { - logID: tlogInstance.logId.keyId, - publicKey: core_1.crypto.createPublicKey(tlogInstance.publicKey.rawBytes, keyType), - validFor: { - start: tlogInstance.publicKey.validFor?.start || BEGINNING_OF_TIME, - end: tlogInstance.publicKey.validFor?.end || END_OF_TIME, - }, - }; -} -function createCertAuthority(ca) { - return { - certChain: ca.certChain.certificates.map((cert) => { - return core_1.X509Certificate.parse(cert.rawBytes); - }), - validFor: { - start: ca.validFor?.start || BEGINNING_OF_TIME, - end: ca.validFor?.end || END_OF_TIME, - }, - }; -} -function keyLocator(keys) { - return (hint) => { - const key = (keys || {})[hint]; - if (!key) { - throw new error_1.VerificationError({ - code: 'PUBLIC_KEY_ERROR', - message: `key not found: ${hint}`, - }); - } - return { - publicKey: core_1.crypto.createPublicKey(key.rawBytes), - validFor: (date) => { - return ((key.validFor?.start || BEGINNING_OF_TIME) <= date && - (key.validFor?.end || END_OF_TIME) >= date); - }, - }; - }; -} diff --git a/node_modules/@sigstore/verify/dist/trust/trust.types.js b/node_modules/@sigstore/verify/dist/trust/trust.types.js deleted file mode 100644 index c8ad2e549bdc6..0000000000000 --- a/node_modules/@sigstore/verify/dist/trust/trust.types.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@sigstore/verify/dist/verifier.js b/node_modules/@sigstore/verify/dist/verifier.js deleted file mode 100644 index 829727cd1d40a..0000000000000 --- a/node_modules/@sigstore/verify/dist/verifier.js +++ /dev/null @@ -1,141 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Verifier = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const util_1 = require("util"); -const error_1 = require("./error"); -const key_1 = require("./key"); -const policy_1 = require("./policy"); -const timestamp_1 = require("./timestamp"); -const tlog_1 = require("./tlog"); -class Verifier { - constructor(trustMaterial, options = {}) { - this.trustMaterial = trustMaterial; - this.options = { - ctlogThreshold: options.ctlogThreshold ?? 1, - tlogThreshold: options.tlogThreshold ?? 1, - tsaThreshold: options.tsaThreshold ?? 0, - }; - } - verify(entity, policy) { - const timestamps = this.verifyTimestamps(entity); - const signer = this.verifySigningKey(entity, timestamps); - this.verifyTLogs(entity); - this.verifySignature(entity, signer); - if (policy) { - this.verifyPolicy(policy, signer.identity || {}); - } - return signer; - } - // Checks that all of the timestamps in the entity are valid and returns them - verifyTimestamps(entity) { - let tlogCount = 0; - let tsaCount = 0; - const timestamps = entity.timestamps.map((timestamp) => { - switch (timestamp.$case) { - case 'timestamp-authority': - tsaCount++; - return (0, timestamp_1.verifyTSATimestamp)(timestamp.timestamp, entity.signature.signature, this.trustMaterial.timestampAuthorities); - case 'transparency-log': - tlogCount++; - return (0, timestamp_1.verifyTLogTimestamp)(timestamp.tlogEntry, this.trustMaterial.tlogs); - } - }); - // Check for duplicate timestamps - if (containsDupes(timestamps)) { - throw new error_1.VerificationError({ - code: 'TIMESTAMP_ERROR', - message: 'duplicate timestamp', - }); - } - if (tlogCount < this.options.tlogThreshold) { - throw new error_1.VerificationError({ - code: 'TIMESTAMP_ERROR', - message: `expected ${this.options.tlogThreshold} tlog timestamps, got ${tlogCount}`, - }); - } - if (tsaCount < this.options.tsaThreshold) { - throw new error_1.VerificationError({ - code: 'TIMESTAMP_ERROR', - message: `expected ${this.options.tsaThreshold} tsa timestamps, got ${tsaCount}`, - }); - } - return timestamps.map((t) => t.timestamp); - } - // Checks that the signing key is valid for all of the the supplied timestamps - // and returns the signer. 
- verifySigningKey({ key }, timestamps) { - switch (key.$case) { - case 'public-key': { - return (0, key_1.verifyPublicKey)(key.hint, timestamps, this.trustMaterial); - } - case 'certificate': { - const result = (0, key_1.verifyCertificate)(key.certificate, timestamps, this.trustMaterial); - /* istanbul ignore next - no fixture */ - if (containsDupes(result.scts)) { - throw new error_1.VerificationError({ - code: 'CERTIFICATE_ERROR', - message: 'duplicate SCT', - }); - } - if (result.scts.length < this.options.ctlogThreshold) { - throw new error_1.VerificationError({ - code: 'CERTIFICATE_ERROR', - message: `expected ${this.options.ctlogThreshold} SCTs, got ${result.scts.length}`, - }); - } - return result.signer; - } - } - } - // Checks that the tlog entries are valid for the supplied content - verifyTLogs({ signature: content, tlogEntries }) { - tlogEntries.forEach((entry) => (0, tlog_1.verifyTLogBody)(entry, content)); - } - // Checks that the signature is valid for the supplied content - verifySignature(entity, signer) { - if (!entity.signature.verifySignature(signer.key)) { - throw new error_1.VerificationError({ - code: 'SIGNATURE_ERROR', - message: 'signature verification failed', - }); - } - } - verifyPolicy(policy, identity) { - // Check the subject alternative name of the signer matches the policy - if (policy.subjectAlternativeName) { - (0, policy_1.verifySubjectAlternativeName)(policy.subjectAlternativeName, identity.subjectAlternativeName); - } - // Check that the extensions of the signer match the policy - if (policy.extensions) { - (0, policy_1.verifyExtensions)(policy.extensions, identity.extensions); - } - } -} -exports.Verifier = Verifier; -// Checks for duplicate items in the array. Objects are compared using -// deep equality. -function containsDupes(arr) { - for (let i = 0; i < arr.length; i++) { - for (let j = i + 1; j < arr.length; j++) { - if ((0, util_1.isDeepStrictEqual)(arr[i], arr[j])) { - return true; - } - } - } - return false; -} diff --git a/node_modules/@sigstore/verify/package.json b/node_modules/@sigstore/verify/package.json deleted file mode 100644 index cd0c845a797e4..0000000000000 --- a/node_modules/@sigstore/verify/package.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "name": "@sigstore/verify", - "version": "1.2.1", - "description": "Verification of Sigstore signatures", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "scripts": { - "clean": "shx rm -rf dist *.tsbuildinfo", - "build": "tsc --build", - "test": "jest" - }, - "files": [ - "dist" - ], - "author": "bdehamer@github.com", - "license": "Apache-2.0", - "repository": { - "type": "git", - "url": "git+https://github.com/sigstore/sigstore-js.git" - }, - "bugs": { - "url": "https://github.com/sigstore/sigstore-js/issues" - }, - "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/verify#readme", - "publishConfig": { - "provenance": true - }, - "dependencies": { - "@sigstore/protobuf-specs": "^0.3.2", - "@sigstore/bundle": "^2.3.2", - "@sigstore/core": "^1.1.0" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } -} diff --git a/node_modules/minizlib/dist/commonjs/constants.js b/node_modules/minizlib/dist/commonjs/constants.js new file mode 100644 index 0000000000000..dfc2c1957bfc9 --- /dev/null +++ b/node_modules/minizlib/dist/commonjs/constants.js @@ -0,0 +1,123 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.constants = void 0; +// Update with any zlib constants that are added or changed in the future. +// Node v6 didn't export this, so we just hard code the version and rely +// on all the other hard-coded values from zlib v4736. When node v6 +// support drops, we can just export the realZlibConstants object. +const zlib_1 = __importDefault(require("zlib")); +/* c8 ignore start */ +const realZlibConstants = zlib_1.default.constants || { ZLIB_VERNUM: 4736 }; +/* c8 ignore stop */ +exports.constants = Object.freeze(Object.assign(Object.create(null), { + Z_NO_FLUSH: 0, + Z_PARTIAL_FLUSH: 1, + Z_SYNC_FLUSH: 2, + Z_FULL_FLUSH: 3, + Z_FINISH: 4, + Z_BLOCK: 5, + Z_OK: 0, + Z_STREAM_END: 1, + Z_NEED_DICT: 2, + Z_ERRNO: -1, + Z_STREAM_ERROR: -2, + Z_DATA_ERROR: -3, + Z_MEM_ERROR: -4, + Z_BUF_ERROR: -5, + Z_VERSION_ERROR: -6, + Z_NO_COMPRESSION: 0, + Z_BEST_SPEED: 1, + Z_BEST_COMPRESSION: 9, + Z_DEFAULT_COMPRESSION: -1, + Z_FILTERED: 1, + Z_HUFFMAN_ONLY: 2, + Z_RLE: 3, + Z_FIXED: 4, + Z_DEFAULT_STRATEGY: 0, + DEFLATE: 1, + INFLATE: 2, + GZIP: 3, + GUNZIP: 4, + DEFLATERAW: 5, + INFLATERAW: 6, + UNZIP: 7, + BROTLI_DECODE: 8, + BROTLI_ENCODE: 9, + Z_MIN_WINDOWBITS: 8, + Z_MAX_WINDOWBITS: 15, + Z_DEFAULT_WINDOWBITS: 15, + Z_MIN_CHUNK: 64, + Z_MAX_CHUNK: Infinity, + Z_DEFAULT_CHUNK: 16384, + Z_MIN_MEMLEVEL: 1, + Z_MAX_MEMLEVEL: 9, + Z_DEFAULT_MEMLEVEL: 8, + Z_MIN_LEVEL: -1, + Z_MAX_LEVEL: 9, + Z_DEFAULT_LEVEL: -1, + BROTLI_OPERATION_PROCESS: 0, + BROTLI_OPERATION_FLUSH: 1, + BROTLI_OPERATION_FINISH: 2, + BROTLI_OPERATION_EMIT_METADATA: 3, + BROTLI_MODE_GENERIC: 0, + BROTLI_MODE_TEXT: 1, + BROTLI_MODE_FONT: 2, + BROTLI_DEFAULT_MODE: 0, + BROTLI_MIN_QUALITY: 0, + BROTLI_MAX_QUALITY: 11, + BROTLI_DEFAULT_QUALITY: 11, + BROTLI_MIN_WINDOW_BITS: 10, + BROTLI_MAX_WINDOW_BITS: 24, + BROTLI_LARGE_MAX_WINDOW_BITS: 30, + BROTLI_DEFAULT_WINDOW: 22, + BROTLI_MIN_INPUT_BLOCK_BITS: 16, + BROTLI_MAX_INPUT_BLOCK_BITS: 24, + BROTLI_PARAM_MODE: 0, + BROTLI_PARAM_QUALITY: 1, + BROTLI_PARAM_LGWIN: 2, + BROTLI_PARAM_LGBLOCK: 3, + BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4, + BROTLI_PARAM_SIZE_HINT: 5, + BROTLI_PARAM_LARGE_WINDOW: 6, + BROTLI_PARAM_NPOSTFIX: 7, + BROTLI_PARAM_NDIRECT: 8, + BROTLI_DECODER_RESULT_ERROR: 0, + BROTLI_DECODER_RESULT_SUCCESS: 1, + BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2, + BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3, + BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0, + BROTLI_DECODER_PARAM_LARGE_WINDOW: 1, + BROTLI_DECODER_NO_ERROR: 0, + BROTLI_DECODER_SUCCESS: 1, + BROTLI_DECODER_NEEDS_MORE_INPUT: 2, + BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3, + BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1, + BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2, + BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3, + BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4, + BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5, + BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6, + BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7, + BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8, + BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9, + BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10, + BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11, + BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12, + BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13, + BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14, + BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15, + BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16, + BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19, + 
BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20, + BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21, + BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22, + BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25, + BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26, + BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27, + BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30, + BROTLI_DECODER_ERROR_UNREACHABLE: -31, +}, realZlibConstants)); +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/node_modules/minizlib/dist/commonjs/index.js b/node_modules/minizlib/dist/commonjs/index.js new file mode 100644 index 0000000000000..ad65eef049507 --- /dev/null +++ b/node_modules/minizlib/dist/commonjs/index.js @@ -0,0 +1,352 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.BrotliDecompress = exports.BrotliCompress = exports.Brotli = exports.Unzip = exports.InflateRaw = exports.DeflateRaw = exports.Gunzip = exports.Gzip = exports.Inflate = exports.Deflate = exports.Zlib = exports.ZlibError = exports.constants = void 0; +const assert_1 = __importDefault(require("assert")); +const buffer_1 = require("buffer"); +const minipass_1 = require("minipass"); +const zlib_1 = __importDefault(require("zlib")); +const constants_js_1 = require("./constants.js"); +var constants_js_2 = require("./constants.js"); +Object.defineProperty(exports, "constants", { enumerable: true, get: function () { return constants_js_2.constants; } }); +const OriginalBufferConcat = buffer_1.Buffer.concat; +const _superWrite = Symbol('_superWrite'); +class ZlibError extends Error { + code; + errno; + constructor(err) { + super('zlib: ' + err.message); + this.code = err.code; + this.errno = err.errno; + /* c8 ignore next */ + if (!this.code) + this.code = 'ZLIB_ERROR'; + this.message = 'zlib: ' + err.message; + Error.captureStackTrace(this, this.constructor); + } + get name() { + return 'ZlibError'; + } +} +exports.ZlibError = ZlibError; +// the Zlib class they all inherit from +// This thing manages the queue of requests, and returns +// true or false if there is anything in the queue when +// you call the .write() method. +const _flushFlag = Symbol('flushFlag'); +class ZlibBase extends minipass_1.Minipass { + #sawError = false; + #ended = false; + #flushFlag; + #finishFlushFlag; + #fullFlushFlag; + #handle; + #onError; + get sawError() { + return this.#sawError; + } + get handle() { + return this.#handle; + } + /* c8 ignore start */ + get flushFlag() { + return this.#flushFlag; + } + /* c8 ignore stop */ + constructor(opts, mode) { + if (!opts || typeof opts !== 'object') + throw new TypeError('invalid options for ZlibBase constructor'); + //@ts-ignore + super(opts); + /* c8 ignore start */ + this.#flushFlag = opts.flush ?? 0; + this.#finishFlushFlag = opts.finishFlush ?? 0; + this.#fullFlushFlag = opts.fullFlushFlag ?? 0; + /* c8 ignore stop */ + // this will throw if any options are invalid for the class selected + try { + // @types/node doesn't know that it exports the classes, but they're there + //@ts-ignore + this.#handle = new zlib_1.default[mode](opts); + } + catch (er) { + // make sure that all errors get decorated properly + throw new ZlibError(er); + } + this.#onError = err => { + // no sense raising multiple errors, since we abort on the first one. + if (this.#sawError) + return; + this.#sawError = true; + // there is no way to cleanly recover. 
+ // continuing only obscures problems. + this.close(); + this.emit('error', err); + }; + this.#handle?.on('error', er => this.#onError(new ZlibError(er))); + this.once('end', () => this.close); + } + close() { + if (this.#handle) { + this.#handle.close(); + this.#handle = undefined; + this.emit('close'); + } + } + reset() { + if (!this.#sawError) { + (0, assert_1.default)(this.#handle, 'zlib binding closed'); + //@ts-ignore + return this.#handle.reset?.(); + } + } + flush(flushFlag) { + if (this.ended) + return; + if (typeof flushFlag !== 'number') + flushFlag = this.#fullFlushFlag; + this.write(Object.assign(buffer_1.Buffer.alloc(0), { [_flushFlag]: flushFlag })); + } + end(chunk, encoding, cb) { + /* c8 ignore start */ + if (typeof chunk === 'function') { + cb = chunk; + encoding = undefined; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + /* c8 ignore stop */ + if (chunk) { + if (encoding) + this.write(chunk, encoding); + else + this.write(chunk); + } + this.flush(this.#finishFlushFlag); + this.#ended = true; + return super.end(cb); + } + get ended() { + return this.#ended; + } + // overridden in the gzip classes to do portable writes + [_superWrite](data) { + return super.write(data); + } + write(chunk, encoding, cb) { + // process the chunk using the sync process + // then super.write() all the outputted chunks + if (typeof encoding === 'function') + (cb = encoding), (encoding = 'utf8'); + if (typeof chunk === 'string') + chunk = buffer_1.Buffer.from(chunk, encoding); + if (this.#sawError) + return; + (0, assert_1.default)(this.#handle, 'zlib binding closed'); + // _processChunk tries to .close() the native handle after it's done, so we + // intercept that by temporarily making it a no-op. + // diving into the node:zlib internals a bit here + const nativeHandle = this.#handle + ._handle; + const originalNativeClose = nativeHandle.close; + nativeHandle.close = () => { }; + const originalClose = this.#handle.close; + this.#handle.close = () => { }; + // It also calls `Buffer.concat()` at the end, which may be convenient + // for some, but which we are not interested in as it slows us down. + buffer_1.Buffer.concat = args => args; + let result = undefined; + try { + const flushFlag = typeof chunk[_flushFlag] === 'number' + ? chunk[_flushFlag] + : this.#flushFlag; + result = this.#handle._processChunk(chunk, flushFlag); + // if we don't throw, reset it back how it was + buffer_1.Buffer.concat = OriginalBufferConcat; + } + catch (err) { + // or if we do, put Buffer.concat() back before we emit error + // Error events call into user code, which may call Buffer.concat() + buffer_1.Buffer.concat = OriginalBufferConcat; + this.#onError(new ZlibError(err)); + } + finally { + if (this.#handle) { + // Core zlib resets `_handle` to null after attempting to close the + // native handle. Our no-op handler prevented actual closure, but we + // need to restore the `._handle` property. + ; + this.#handle._handle = + nativeHandle; + nativeHandle.close = originalNativeClose; + this.#handle.close = originalClose; + // `_processChunk()` adds an 'error' listener. If we don't remove it + // after each call, these handlers start piling up. 
+ this.#handle.removeAllListeners('error'); + // make sure OUR error listener is still attached tho + } + } + if (this.#handle) + this.#handle.on('error', er => this.#onError(new ZlibError(er))); + let writeReturn; + if (result) { + if (Array.isArray(result) && result.length > 0) { + const r = result[0]; + // The first buffer is always `handle._outBuffer`, which would be + // re-used for later invocations; so, we always have to copy that one. + writeReturn = this[_superWrite](buffer_1.Buffer.from(r)); + for (let i = 1; i < result.length; i++) { + writeReturn = this[_superWrite](result[i]); + } + } + else { + // either a single Buffer or an empty array + writeReturn = this[_superWrite](buffer_1.Buffer.from(result)); + } + } + if (cb) + cb(); + return writeReturn; + } +} +class Zlib extends ZlibBase { + #level; + #strategy; + constructor(opts, mode) { + opts = opts || {}; + opts.flush = opts.flush || constants_js_1.constants.Z_NO_FLUSH; + opts.finishFlush = opts.finishFlush || constants_js_1.constants.Z_FINISH; + opts.fullFlushFlag = constants_js_1.constants.Z_FULL_FLUSH; + super(opts, mode); + this.#level = opts.level; + this.#strategy = opts.strategy; + } + params(level, strategy) { + if (this.sawError) + return; + if (!this.handle) + throw new Error('cannot switch params when binding is closed'); + // no way to test this without also not supporting params at all + /* c8 ignore start */ + if (!this.handle.params) + throw new Error('not supported in this implementation'); + /* c8 ignore stop */ + if (this.#level !== level || this.#strategy !== strategy) { + this.flush(constants_js_1.constants.Z_SYNC_FLUSH); + (0, assert_1.default)(this.handle, 'zlib binding closed'); + // .params() calls .flush(), but the latter is always async in the + // core zlib. We override .flush() temporarily to intercept that and + // flush synchronously. + const origFlush = this.handle.flush; + this.handle.flush = (flushFlag, cb) => { + /* c8 ignore start */ + if (typeof flushFlag === 'function') { + cb = flushFlag; + flushFlag = this.flushFlag; + } + /* c8 ignore stop */ + this.flush(flushFlag); + cb?.(); + }; + try { + ; + this.handle.params(level, strategy); + } + finally { + this.handle.flush = origFlush; + } + /* c8 ignore start */ + if (this.handle) { + this.#level = level; + this.#strategy = strategy; + } + /* c8 ignore stop */ + } + } +} +exports.Zlib = Zlib; +// minimal 2-byte header +class Deflate extends Zlib { + constructor(opts) { + super(opts, 'Deflate'); + } +} +exports.Deflate = Deflate; +class Inflate extends Zlib { + constructor(opts) { + super(opts, 'Inflate'); + } +} +exports.Inflate = Inflate; +class Gzip extends Zlib { + #portable; + constructor(opts) { + super(opts, 'Gzip'); + this.#portable = opts && !!opts.portable; + } + [_superWrite](data) { + if (!this.#portable) + return super[_superWrite](data); + // we'll always get the header emitted in one first chunk + // overwrite the OS indicator byte with 0xFF + this.#portable = false; + data[9] = 255; + return super[_superWrite](data); + } +} +exports.Gzip = Gzip; +class Gunzip extends Zlib { + constructor(opts) { + super(opts, 'Gunzip'); + } +} +exports.Gunzip = Gunzip; +// raw - no header +class DeflateRaw extends Zlib { + constructor(opts) { + super(opts, 'DeflateRaw'); + } +} +exports.DeflateRaw = DeflateRaw; +class InflateRaw extends Zlib { + constructor(opts) { + super(opts, 'InflateRaw'); + } +} +exports.InflateRaw = InflateRaw; +// auto-detect header. 
+class Unzip extends Zlib { + constructor(opts) { + super(opts, 'Unzip'); + } +} +exports.Unzip = Unzip; +class Brotli extends ZlibBase { + constructor(opts, mode) { + opts = opts || {}; + opts.flush = opts.flush || constants_js_1.constants.BROTLI_OPERATION_PROCESS; + opts.finishFlush = + opts.finishFlush || constants_js_1.constants.BROTLI_OPERATION_FINISH; + opts.fullFlushFlag = constants_js_1.constants.BROTLI_OPERATION_FLUSH; + super(opts, mode); + } +} +exports.Brotli = Brotli; +class BrotliCompress extends Brotli { + constructor(opts) { + super(opts, 'BrotliCompress'); + } +} +exports.BrotliCompress = BrotliCompress; +class BrotliDecompress extends Brotli { + constructor(opts) { + super(opts, 'BrotliDecompress'); + } +} +exports.BrotliDecompress = BrotliDecompress; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/minizlib/dist/commonjs/package.json b/node_modules/minizlib/dist/commonjs/package.json new file mode 100644 index 0000000000000..5bbefffbabee3 --- /dev/null +++ b/node_modules/minizlib/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/minizlib/dist/esm/constants.js b/node_modules/minizlib/dist/esm/constants.js new file mode 100644 index 0000000000000..7faf40be5068d --- /dev/null +++ b/node_modules/minizlib/dist/esm/constants.js @@ -0,0 +1,117 @@ +// Update with any zlib constants that are added or changed in the future. +// Node v6 didn't export this, so we just hard code the version and rely +// on all the other hard-coded values from zlib v4736. When node v6 +// support drops, we can just export the realZlibConstants object. +import realZlib from 'zlib'; +/* c8 ignore start */ +const realZlibConstants = realZlib.constants || { ZLIB_VERNUM: 4736 }; +/* c8 ignore stop */ +export const constants = Object.freeze(Object.assign(Object.create(null), { + Z_NO_FLUSH: 0, + Z_PARTIAL_FLUSH: 1, + Z_SYNC_FLUSH: 2, + Z_FULL_FLUSH: 3, + Z_FINISH: 4, + Z_BLOCK: 5, + Z_OK: 0, + Z_STREAM_END: 1, + Z_NEED_DICT: 2, + Z_ERRNO: -1, + Z_STREAM_ERROR: -2, + Z_DATA_ERROR: -3, + Z_MEM_ERROR: -4, + Z_BUF_ERROR: -5, + Z_VERSION_ERROR: -6, + Z_NO_COMPRESSION: 0, + Z_BEST_SPEED: 1, + Z_BEST_COMPRESSION: 9, + Z_DEFAULT_COMPRESSION: -1, + Z_FILTERED: 1, + Z_HUFFMAN_ONLY: 2, + Z_RLE: 3, + Z_FIXED: 4, + Z_DEFAULT_STRATEGY: 0, + DEFLATE: 1, + INFLATE: 2, + GZIP: 3, + GUNZIP: 4, + DEFLATERAW: 5, + INFLATERAW: 6, + UNZIP: 7, + BROTLI_DECODE: 8, + BROTLI_ENCODE: 9, + Z_MIN_WINDOWBITS: 8, + Z_MAX_WINDOWBITS: 15, + Z_DEFAULT_WINDOWBITS: 15, + Z_MIN_CHUNK: 64, + Z_MAX_CHUNK: Infinity, + Z_DEFAULT_CHUNK: 16384, + Z_MIN_MEMLEVEL: 1, + Z_MAX_MEMLEVEL: 9, + Z_DEFAULT_MEMLEVEL: 8, + Z_MIN_LEVEL: -1, + Z_MAX_LEVEL: 9, + Z_DEFAULT_LEVEL: -1, + BROTLI_OPERATION_PROCESS: 0, + BROTLI_OPERATION_FLUSH: 1, + BROTLI_OPERATION_FINISH: 2, + BROTLI_OPERATION_EMIT_METADATA: 3, + BROTLI_MODE_GENERIC: 0, + BROTLI_MODE_TEXT: 1, + BROTLI_MODE_FONT: 2, + BROTLI_DEFAULT_MODE: 0, + BROTLI_MIN_QUALITY: 0, + BROTLI_MAX_QUALITY: 11, + BROTLI_DEFAULT_QUALITY: 11, + BROTLI_MIN_WINDOW_BITS: 10, + BROTLI_MAX_WINDOW_BITS: 24, + BROTLI_LARGE_MAX_WINDOW_BITS: 30, + BROTLI_DEFAULT_WINDOW: 22, + BROTLI_MIN_INPUT_BLOCK_BITS: 16, + BROTLI_MAX_INPUT_BLOCK_BITS: 24, + BROTLI_PARAM_MODE: 0, + BROTLI_PARAM_QUALITY: 1, + BROTLI_PARAM_LGWIN: 2, + BROTLI_PARAM_LGBLOCK: 3, + BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4, + BROTLI_PARAM_SIZE_HINT: 5, + BROTLI_PARAM_LARGE_WINDOW: 6, + BROTLI_PARAM_NPOSTFIX: 7, + BROTLI_PARAM_NDIRECT: 8, + BROTLI_DECODER_RESULT_ERROR: 0, + 
BROTLI_DECODER_RESULT_SUCCESS: 1, + BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2, + BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3, + BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0, + BROTLI_DECODER_PARAM_LARGE_WINDOW: 1, + BROTLI_DECODER_NO_ERROR: 0, + BROTLI_DECODER_SUCCESS: 1, + BROTLI_DECODER_NEEDS_MORE_INPUT: 2, + BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3, + BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1, + BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2, + BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3, + BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4, + BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5, + BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6, + BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7, + BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8, + BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9, + BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10, + BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11, + BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12, + BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13, + BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14, + BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15, + BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16, + BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19, + BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20, + BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21, + BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22, + BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25, + BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26, + BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27, + BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30, + BROTLI_DECODER_ERROR_UNREACHABLE: -31, +}, realZlibConstants)); +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/node_modules/minizlib/dist/esm/index.js b/node_modules/minizlib/dist/esm/index.js new file mode 100644 index 0000000000000..a6269b505f47c --- /dev/null +++ b/node_modules/minizlib/dist/esm/index.js @@ -0,0 +1,333 @@ +import assert from 'assert'; +import { Buffer } from 'buffer'; +import { Minipass } from 'minipass'; +import realZlib from 'zlib'; +import { constants } from './constants.js'; +export { constants } from './constants.js'; +const OriginalBufferConcat = Buffer.concat; +const _superWrite = Symbol('_superWrite'); +export class ZlibError extends Error { + code; + errno; + constructor(err) { + super('zlib: ' + err.message); + this.code = err.code; + this.errno = err.errno; + /* c8 ignore next */ + if (!this.code) + this.code = 'ZLIB_ERROR'; + this.message = 'zlib: ' + err.message; + Error.captureStackTrace(this, this.constructor); + } + get name() { + return 'ZlibError'; + } +} +// the Zlib class they all inherit from +// This thing manages the queue of requests, and returns +// true or false if there is anything in the queue when +// you call the .write() method. +const _flushFlag = Symbol('flushFlag'); +class ZlibBase extends Minipass { + #sawError = false; + #ended = false; + #flushFlag; + #finishFlushFlag; + #fullFlushFlag; + #handle; + #onError; + get sawError() { + return this.#sawError; + } + get handle() { + return this.#handle; + } + /* c8 ignore start */ + get flushFlag() { + return this.#flushFlag; + } + /* c8 ignore stop */ + constructor(opts, mode) { + if (!opts || typeof opts !== 'object') + throw new TypeError('invalid options for ZlibBase constructor'); + //@ts-ignore + super(opts); + /* c8 ignore start */ + this.#flushFlag = opts.flush ?? 0; + this.#finishFlushFlag = opts.finishFlush ?? 0; + this.#fullFlushFlag = opts.fullFlushFlag ?? 
0; + /* c8 ignore stop */ + // this will throw if any options are invalid for the class selected + try { + // @types/node doesn't know that it exports the classes, but they're there + //@ts-ignore + this.#handle = new realZlib[mode](opts); + } + catch (er) { + // make sure that all errors get decorated properly + throw new ZlibError(er); + } + this.#onError = err => { + // no sense raising multiple errors, since we abort on the first one. + if (this.#sawError) + return; + this.#sawError = true; + // there is no way to cleanly recover. + // continuing only obscures problems. + this.close(); + this.emit('error', err); + }; + this.#handle?.on('error', er => this.#onError(new ZlibError(er))); + this.once('end', () => this.close); + } + close() { + if (this.#handle) { + this.#handle.close(); + this.#handle = undefined; + this.emit('close'); + } + } + reset() { + if (!this.#sawError) { + assert(this.#handle, 'zlib binding closed'); + //@ts-ignore + return this.#handle.reset?.(); + } + } + flush(flushFlag) { + if (this.ended) + return; + if (typeof flushFlag !== 'number') + flushFlag = this.#fullFlushFlag; + this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag })); + } + end(chunk, encoding, cb) { + /* c8 ignore start */ + if (typeof chunk === 'function') { + cb = chunk; + encoding = undefined; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + /* c8 ignore stop */ + if (chunk) { + if (encoding) + this.write(chunk, encoding); + else + this.write(chunk); + } + this.flush(this.#finishFlushFlag); + this.#ended = true; + return super.end(cb); + } + get ended() { + return this.#ended; + } + // overridden in the gzip classes to do portable writes + [_superWrite](data) { + return super.write(data); + } + write(chunk, encoding, cb) { + // process the chunk using the sync process + // then super.write() all the outputted chunks + if (typeof encoding === 'function') + (cb = encoding), (encoding = 'utf8'); + if (typeof chunk === 'string') + chunk = Buffer.from(chunk, encoding); + if (this.#sawError) + return; + assert(this.#handle, 'zlib binding closed'); + // _processChunk tries to .close() the native handle after it's done, so we + // intercept that by temporarily making it a no-op. + // diving into the node:zlib internals a bit here + const nativeHandle = this.#handle + ._handle; + const originalNativeClose = nativeHandle.close; + nativeHandle.close = () => { }; + const originalClose = this.#handle.close; + this.#handle.close = () => { }; + // It also calls `Buffer.concat()` at the end, which may be convenient + // for some, but which we are not interested in as it slows us down. + Buffer.concat = args => args; + let result = undefined; + try { + const flushFlag = typeof chunk[_flushFlag] === 'number' + ? chunk[_flushFlag] + : this.#flushFlag; + result = this.#handle._processChunk(chunk, flushFlag); + // if we don't throw, reset it back how it was + Buffer.concat = OriginalBufferConcat; + } + catch (err) { + // or if we do, put Buffer.concat() back before we emit error + // Error events call into user code, which may call Buffer.concat() + Buffer.concat = OriginalBufferConcat; + this.#onError(new ZlibError(err)); + } + finally { + if (this.#handle) { + // Core zlib resets `_handle` to null after attempting to close the + // native handle. Our no-op handler prevented actual closure, but we + // need to restore the `._handle` property. 
+ ; + this.#handle._handle = + nativeHandle; + nativeHandle.close = originalNativeClose; + this.#handle.close = originalClose; + // `_processChunk()` adds an 'error' listener. If we don't remove it + // after each call, these handlers start piling up. + this.#handle.removeAllListeners('error'); + // make sure OUR error listener is still attached tho + } + } + if (this.#handle) + this.#handle.on('error', er => this.#onError(new ZlibError(er))); + let writeReturn; + if (result) { + if (Array.isArray(result) && result.length > 0) { + const r = result[0]; + // The first buffer is always `handle._outBuffer`, which would be + // re-used for later invocations; so, we always have to copy that one. + writeReturn = this[_superWrite](Buffer.from(r)); + for (let i = 1; i < result.length; i++) { + writeReturn = this[_superWrite](result[i]); + } + } + else { + // either a single Buffer or an empty array + writeReturn = this[_superWrite](Buffer.from(result)); + } + } + if (cb) + cb(); + return writeReturn; + } +} +export class Zlib extends ZlibBase { + #level; + #strategy; + constructor(opts, mode) { + opts = opts || {}; + opts.flush = opts.flush || constants.Z_NO_FLUSH; + opts.finishFlush = opts.finishFlush || constants.Z_FINISH; + opts.fullFlushFlag = constants.Z_FULL_FLUSH; + super(opts, mode); + this.#level = opts.level; + this.#strategy = opts.strategy; + } + params(level, strategy) { + if (this.sawError) + return; + if (!this.handle) + throw new Error('cannot switch params when binding is closed'); + // no way to test this without also not supporting params at all + /* c8 ignore start */ + if (!this.handle.params) + throw new Error('not supported in this implementation'); + /* c8 ignore stop */ + if (this.#level !== level || this.#strategy !== strategy) { + this.flush(constants.Z_SYNC_FLUSH); + assert(this.handle, 'zlib binding closed'); + // .params() calls .flush(), but the latter is always async in the + // core zlib. We override .flush() temporarily to intercept that and + // flush synchronously. + const origFlush = this.handle.flush; + this.handle.flush = (flushFlag, cb) => { + /* c8 ignore start */ + if (typeof flushFlag === 'function') { + cb = flushFlag; + flushFlag = this.flushFlag; + } + /* c8 ignore stop */ + this.flush(flushFlag); + cb?.(); + }; + try { + ; + this.handle.params(level, strategy); + } + finally { + this.handle.flush = origFlush; + } + /* c8 ignore start */ + if (this.handle) { + this.#level = level; + this.#strategy = strategy; + } + /* c8 ignore stop */ + } + } +} +// minimal 2-byte header +export class Deflate extends Zlib { + constructor(opts) { + super(opts, 'Deflate'); + } +} +export class Inflate extends Zlib { + constructor(opts) { + super(opts, 'Inflate'); + } +} +export class Gzip extends Zlib { + #portable; + constructor(opts) { + super(opts, 'Gzip'); + this.#portable = opts && !!opts.portable; + } + [_superWrite](data) { + if (!this.#portable) + return super[_superWrite](data); + // we'll always get the header emitted in one first chunk + // overwrite the OS indicator byte with 0xFF + this.#portable = false; + data[9] = 255; + return super[_superWrite](data); + } +} +export class Gunzip extends Zlib { + constructor(opts) { + super(opts, 'Gunzip'); + } +} +// raw - no header +export class DeflateRaw extends Zlib { + constructor(opts) { + super(opts, 'DeflateRaw'); + } +} +export class InflateRaw extends Zlib { + constructor(opts) { + super(opts, 'InflateRaw'); + } +} +// auto-detect header. 
+export class Unzip extends Zlib { + constructor(opts) { + super(opts, 'Unzip'); + } +} +export class Brotli extends ZlibBase { + constructor(opts, mode) { + opts = opts || {}; + opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS; + opts.finishFlush = + opts.finishFlush || constants.BROTLI_OPERATION_FINISH; + opts.fullFlushFlag = constants.BROTLI_OPERATION_FLUSH; + super(opts, mode); + } +} +export class BrotliCompress extends Brotli { + constructor(opts) { + super(opts, 'BrotliCompress'); + } +} +export class BrotliDecompress extends Brotli { + constructor(opts) { + super(opts, 'BrotliDecompress'); + } +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/minizlib/dist/esm/package.json b/node_modules/minizlib/dist/esm/package.json new file mode 100644 index 0000000000000..3dbc1ca591c05 --- /dev/null +++ b/node_modules/minizlib/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/sigstore/LICENSE b/node_modules/sigstore/LICENSE deleted file mode 100644 index e9e7c1679a09d..0000000000000 --- a/node_modules/sigstore/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2023 The Sigstore Authors - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/node_modules/sigstore/dist/config.js b/node_modules/sigstore/dist/config.js deleted file mode 100644 index b4f0eea74fa4b..0000000000000 --- a/node_modules/sigstore/dist/config.js +++ /dev/null @@ -1,116 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.createVerificationPolicy = exports.createKeyFinder = exports.createBundleBuilder = exports.DEFAULT_TIMEOUT = exports.DEFAULT_RETRY = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-See the License for the specific language governing permissions and -limitations under the License. -*/ -const core_1 = require("@sigstore/core"); -const sign_1 = require("@sigstore/sign"); -const verify_1 = require("@sigstore/verify"); -exports.DEFAULT_RETRY = { retries: 2 }; -exports.DEFAULT_TIMEOUT = 5000; -function createBundleBuilder(bundleType, options) { - const bundlerOptions = { - signer: initSigner(options), - witnesses: initWitnesses(options), - }; - switch (bundleType) { - case 'messageSignature': - return new sign_1.MessageSignatureBundleBuilder(bundlerOptions); - case 'dsseEnvelope': - return new sign_1.DSSEBundleBuilder(bundlerOptions); - } -} -exports.createBundleBuilder = createBundleBuilder; -// Translates the public KeySelector type into the KeyFinderFunc type needed by -// the verifier. -function createKeyFinder(keySelector) { - return (hint) => { - const key = keySelector(hint); - if (!key) { - throw new verify_1.VerificationError({ - code: 'PUBLIC_KEY_ERROR', - message: `key not found: ${hint}`, - }); - } - return { - publicKey: core_1.crypto.createPublicKey(key), - validFor: () => true, - }; - }; -} -exports.createKeyFinder = createKeyFinder; -function createVerificationPolicy(options) { - const policy = {}; - const san = options.certificateIdentityEmail || options.certificateIdentityURI; - if (san) { - policy.subjectAlternativeName = san; - } - if (options.certificateIssuer) { - policy.extensions = { issuer: options.certificateIssuer }; - } - return policy; -} -exports.createVerificationPolicy = createVerificationPolicy; -// Instantiate the FulcioSigner based on the supplied options. -function initSigner(options) { - return new sign_1.FulcioSigner({ - fulcioBaseURL: options.fulcioURL, - identityProvider: options.identityProvider || initIdentityProvider(options), - retry: options.retry ?? exports.DEFAULT_RETRY, - timeout: options.timeout ?? exports.DEFAULT_TIMEOUT, - }); -} -// Instantiate an identity provider based on the supplied options. If an -// explicit identity token is provided, use that. Otherwise, use the CI -// context provider. -function initIdentityProvider(options) { - const token = options.identityToken; - if (token) { - /* istanbul ignore next */ - return { getToken: () => Promise.resolve(token) }; - } - else { - return new sign_1.CIContextProvider('sigstore'); - } -} -// Instantiate a collection of witnesses based on the supplied options. -function initWitnesses(options) { - const witnesses = []; - if (isRekorEnabled(options)) { - witnesses.push(new sign_1.RekorWitness({ - rekorBaseURL: options.rekorURL, - fetchOnConflict: false, - retry: options.retry ?? exports.DEFAULT_RETRY, - timeout: options.timeout ?? exports.DEFAULT_TIMEOUT, - })); - } - if (isTSAEnabled(options)) { - witnesses.push(new sign_1.TSAWitness({ - tsaBaseURL: options.tsaServerURL, - retry: options.retry ?? exports.DEFAULT_RETRY, - timeout: options.timeout ?? 
exports.DEFAULT_TIMEOUT, - })); - } - return witnesses; -} -// Type assertion to ensure that Rekor is enabled -function isRekorEnabled(options) { - return options.tlogUpload !== false; -} -// Type assertion to ensure that TSA is enabled -function isTSAEnabled(options) { - return options.tsaServerURL !== undefined; -} diff --git a/node_modules/sigstore/dist/index.js b/node_modules/sigstore/dist/index.js deleted file mode 100644 index 7f6a5cf86bbfc..0000000000000 --- a/node_modules/sigstore/dist/index.js +++ /dev/null @@ -1,34 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.verify = exports.sign = exports.createVerifier = exports.attest = exports.VerificationError = exports.PolicyError = exports.TUFError = exports.InternalError = exports.DEFAULT_REKOR_URL = exports.DEFAULT_FULCIO_URL = exports.ValidationError = void 0; -/* -Copyright 2022 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -var bundle_1 = require("@sigstore/bundle"); -Object.defineProperty(exports, "ValidationError", { enumerable: true, get: function () { return bundle_1.ValidationError; } }); -var sign_1 = require("@sigstore/sign"); -Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return sign_1.DEFAULT_FULCIO_URL; } }); -Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return sign_1.DEFAULT_REKOR_URL; } }); -Object.defineProperty(exports, "InternalError", { enumerable: true, get: function () { return sign_1.InternalError; } }); -var tuf_1 = require("@sigstore/tuf"); -Object.defineProperty(exports, "TUFError", { enumerable: true, get: function () { return tuf_1.TUFError; } }); -var verify_1 = require("@sigstore/verify"); -Object.defineProperty(exports, "PolicyError", { enumerable: true, get: function () { return verify_1.PolicyError; } }); -Object.defineProperty(exports, "VerificationError", { enumerable: true, get: function () { return verify_1.VerificationError; } }); -var sigstore_1 = require("./sigstore"); -Object.defineProperty(exports, "attest", { enumerable: true, get: function () { return sigstore_1.attest; } }); -Object.defineProperty(exports, "createVerifier", { enumerable: true, get: function () { return sigstore_1.createVerifier; } }); -Object.defineProperty(exports, "sign", { enumerable: true, get: function () { return sigstore_1.sign; } }); -Object.defineProperty(exports, "verify", { enumerable: true, get: function () { return sigstore_1.verify; } }); diff --git a/node_modules/sigstore/dist/sigstore.js b/node_modules/sigstore/dist/sigstore.js deleted file mode 100644 index 3f6d895f84168..0000000000000 --- a/node_modules/sigstore/dist/sigstore.js +++ /dev/null @@ -1,103 +0,0 @@ -"use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.createVerifier = exports.verify = exports.attest = exports.sign = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const bundle_1 = require("@sigstore/bundle"); -const tuf = __importStar(require("@sigstore/tuf")); -const verify_1 = require("@sigstore/verify"); -const config = __importStar(require("./config")); -async function sign(payload, -/* istanbul ignore next */ -options = {}) { - const bundler = config.createBundleBuilder('messageSignature', options); - const bundle = await bundler.create({ data: payload }); - return (0, bundle_1.bundleToJSON)(bundle); -} -exports.sign = sign; -async function attest(payload, payloadType, -/* istanbul ignore next */ -options = {}) { - const bundler = config.createBundleBuilder('dsseEnvelope', options); - const bundle = await bundler.create({ data: payload, type: payloadType }); - return (0, bundle_1.bundleToJSON)(bundle); -} -exports.attest = attest; -async function verify(bundle, dataOrOptions, options) { - let data; - if (Buffer.isBuffer(dataOrOptions)) { - data = dataOrOptions; - } - else { - options = dataOrOptions; - } - return createVerifier(options).then((verifier) => verifier.verify(bundle, data)); -} -exports.verify = verify; -async function createVerifier( -/* istanbul ignore next */ -options = {}) { - const trustedRoot = await tuf.getTrustedRoot({ - mirrorURL: options.tufMirrorURL, - rootPath: options.tufRootPath, - cachePath: options.tufCachePath, - forceCache: options.tufForceCache, - retry: options.retry ?? config.DEFAULT_RETRY, - timeout: options.timeout ?? config.DEFAULT_TIMEOUT, - }); - const keyFinder = options.keySelector - ? 
config.createKeyFinder(options.keySelector) - : undefined; - const trustMaterial = (0, verify_1.toTrustMaterial)(trustedRoot, keyFinder); - const verifierOptions = { - ctlogThreshold: options.ctLogThreshold, - tlogThreshold: options.tlogThreshold, - }; - const verifier = new verify_1.Verifier(trustMaterial, verifierOptions); - const policy = config.createVerificationPolicy(options); - return { - verify: (bundle, payload) => { - const deserializedBundle = (0, bundle_1.bundleFromJSON)(bundle); - const signedEntity = (0, verify_1.toSignedEntity)(deserializedBundle, payload); - verifier.verify(signedEntity, policy); - return; - }, - }; -} -exports.createVerifier = createVerifier; diff --git a/node_modules/sigstore/package.json b/node_modules/sigstore/package.json deleted file mode 100644 index fa8744bf304a3..0000000000000 --- a/node_modules/sigstore/package.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "name": "sigstore", - "version": "2.3.1", - "description": "code-signing for npm packages", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "scripts": { - "clean": "shx rm -rf dist *.tsbuildinfo", - "build": "tsc --build", - "test": "jest" - }, - "files": [ - "dist", - "store" - ], - "author": "bdehamer@github.com", - "license": "Apache-2.0", - "repository": { - "type": "git", - "url": "git+https://github.com/sigstore/sigstore-js.git" - }, - "bugs": { - "url": "https://github.com/sigstore/sigstore-js/issues" - }, - "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/client#readme", - "publishConfig": { - "provenance": true - }, - "devDependencies": { - "@sigstore/rekor-types": "^2.0.0", - "@sigstore/jest": "^0.0.0", - "@sigstore/mock": "^0.7.4", - "@tufjs/repo-mock": "^2.0.1", - "@types/make-fetch-happen": "^10.0.4" - }, - "dependencies": { - "@sigstore/bundle": "^2.3.2", - "@sigstore/core": "^1.0.0", - "@sigstore/protobuf-specs": "^0.3.2", - "@sigstore/sign": "^2.3.2", - "@sigstore/tuf": "^2.3.4", - "@sigstore/verify": "^1.2.1" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } -} diff --git a/package-lock.json b/package-lock.json index bffde21c6c9db..97780eb0cb31d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -228,7 +228,7 @@ "version": "1.0.0", "license": "ISC", "devDependencies": { - "@npmcli/arborist": "^7.1.0", + "@npmcli/arborist": "^7.5.4", "@npmcli/eslint-config": "^5.0.1", "@npmcli/template-oss": "4.23.3", "json-stringify-safe": "^5.0.1", @@ -258,56 +258,6 @@ "node": "^16.14.0 || >=18.0.0" } }, - "mock-registry/node_modules/@npmcli/arborist": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/@npmcli/arborist/-/arborist-7.5.4.tgz", - "integrity": "sha512-nWtIc6QwwoUORCRNzKx4ypHqCk3drI+5aeYdMTQQiRCcn4lOOgfQh7WyZobGYTxXPSq1VwV53lkpN/BRlRk08g==", - "dev": true, - "license": "ISC", - "dependencies": { - "@isaacs/string-locale-compare": "^1.1.0", - "@npmcli/fs": "^3.1.1", - "@npmcli/installed-package-contents": "^2.1.0", - "@npmcli/map-workspaces": "^3.0.2", - "@npmcli/metavuln-calculator": "^7.1.1", - "@npmcli/name-from-folder": "^2.0.0", - "@npmcli/node-gyp": "^3.0.0", - "@npmcli/package-json": "^5.1.0", - "@npmcli/query": "^3.1.0", - "@npmcli/redact": "^2.0.0", - "@npmcli/run-script": "^8.1.0", - "bin-links": "^4.0.4", - "cacache": "^18.0.3", - "common-ancestor-path": "^1.0.1", - "hosted-git-info": "^7.0.2", - "json-parse-even-better-errors": "^3.0.2", - "json-stringify-nice": "^1.1.4", - "lru-cache": "^10.2.2", - "minimatch": "^9.0.4", - "nopt": "^7.2.1", - "npm-install-checks": "^6.2.0", - "npm-package-arg": "^11.0.2", - 
"npm-pick-manifest": "^9.0.1", - "npm-registry-fetch": "^17.0.1", - "pacote": "^18.0.6", - "parse-conflict-json": "^3.0.0", - "proc-log": "^4.2.0", - "proggy": "^2.0.0", - "promise-all-reject-late": "^1.0.0", - "promise-call-limit": "^3.0.1", - "read-package-json-fast": "^3.0.2", - "semver": "^7.3.7", - "ssri": "^10.0.6", - "treeverse": "^3.0.0", - "walk-up-path": "^3.0.1" - }, - "bin": { - "arborist": "bin/index.js" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, "mock-registry/node_modules/@npmcli/arborist/node_modules/npm-package-arg": { "version": "11.0.3", "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-11.0.3.tgz", @@ -3619,6 +3569,7 @@ "version": "2.3.2", "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-2.3.2.tgz", "integrity": "sha512-wueKWDk70QixNLB363yHc2D2ItTgYiMTdPwK8D9dKQMR3ZQ0c35IxP5xnwQ8cNLoCgCRcHf14kE+CLIvNX1zmA==", + "dev": true, "license": "Apache-2.0", "dependencies": { "@sigstore/protobuf-specs": "^0.3.2" @@ -3631,6 +3582,7 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/@sigstore/core/-/core-1.1.0.tgz", "integrity": "sha512-JzBqdVIyqm2FRQCulY6nbQzMpJJpSiJ8XXWMhtOX9eKgaXXpfNOF53lzQEjIydlStnd/eFtuC1dW4VYdD93oRg==", + "dev": true, "license": "Apache-2.0", "engines": { "node": "^16.14.0 || >=18.0.0" @@ -3650,6 +3602,7 @@ "version": "2.3.2", "resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-2.3.2.tgz", "integrity": "sha512-5Vz5dPVuunIIvC5vBb0APwo7qKA4G9yM48kPWJT+OEERs40md5GoUR1yedwpekWZ4m0Hhw44m6zU+ObsON+iDA==", + "dev": true, "license": "Apache-2.0", "dependencies": { "@sigstore/bundle": "^2.3.2", @@ -3667,6 +3620,7 @@ "version": "2.2.2", "resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-2.2.2.tgz", "integrity": "sha512-OrcNPXdpSl9UX7qPVRWbmWMCSXrcDa2M9DvrbOTj7ao1S4PlqVFYv9/yLKMkrJKZ/V5A/kDBC690or307i26Og==", + "dev": true, "license": "ISC", "dependencies": { "agent-base": "^7.1.0", @@ -3683,6 +3637,7 @@ "version": "3.1.1", "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-3.1.1.tgz", "integrity": "sha512-q9CRWjpHCMIh5sVyefoD1cA7PkvILqCZsnSOEUUivORLjxCO/Irmue2DprETiNgEqktDBZaM1Bi+jrarx1XdCg==", + "dev": true, "license": "ISC", "dependencies": { "semver": "^7.3.5" @@ -3695,6 +3650,7 @@ "version": "18.0.4", "resolved": "https://registry.npmjs.org/cacache/-/cacache-18.0.4.tgz", "integrity": "sha512-B+L5iIa9mgcjLbliir2th36yEwPftrzteHYujzsx3dFP/31GCHcIeS8f5MGd80odLOjaOvSpU3EEAmRQptkxLQ==", + "dev": true, "license": "ISC", "dependencies": { "@npmcli/fs": "^3.1.0", @@ -3718,6 +3674,7 @@ "version": "13.0.1", "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-13.0.1.tgz", "integrity": "sha512-cKTUFc/rbKUd/9meOvgrpJ2WrNzymt6jfRDdwg5UCnVzv9dTpEj9JS5m3wtziXVCjluIXyL8pcaukYqezIzZQA==", + "dev": true, "license": "ISC", "dependencies": { "@npmcli/agent": "^2.0.0", @@ -3741,6 +3698,7 @@ "version": "3.0.5", "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-3.0.5.tgz", "integrity": "sha512-2N8elDQAtSnFV0Dk7gt15KHsS0Fyz6CbYZ360h0WTYV1Ty46li3rAXVOQj1THMNLdmrD9Vt5pBPtWtVkpwGBqg==", + "dev": true, "license": "MIT", "dependencies": { "minipass": "^7.0.3", @@ -3758,6 +3716,7 @@ "version": "4.2.0", "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz", "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==", + "dev": true, "license": "ISC", "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" @@ -3767,6 +3726,7 @@ "version": "10.0.6", "resolved": 
"https://registry.npmjs.org/ssri/-/ssri-10.0.6.tgz", "integrity": "sha512-MGrFH9Z4NP9Iyhqn16sDtBpRRNJ0Y2hNa6D65h736fVSaPCHr4DM4sWUNvVaSuC+0OBGhwsrydQwmgfg5LncqQ==", + "dev": true, "license": "ISC", "dependencies": { "minipass": "^7.0.3" @@ -3779,6 +3739,7 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-3.0.0.tgz", "integrity": "sha512-afXhuC55wkAmZ0P18QsVE6kp8JaxrEokN2HGIoIVv2ijHQd419H0+6EigAFcIzXeMIkcIkNBpB3L/DXB3cTS/g==", + "dev": true, "license": "ISC", "dependencies": { "unique-slug": "^4.0.0" @@ -3791,6 +3752,7 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-4.0.0.tgz", "integrity": "sha512-WrcA6AyEfqDX5bWige/4NQfPZMtASNVxdmWR76WESYQVAACSgWcR6e9i0mofqqBxYFtL4oAxPIptY73/0YE1DQ==", + "dev": true, "license": "ISC", "dependencies": { "imurmurhash": "^0.1.4" @@ -3817,6 +3779,7 @@ "version": "1.2.1", "resolved": "https://registry.npmjs.org/@sigstore/verify/-/verify-1.2.1.tgz", "integrity": "sha512-8iKx79/F73DKbGfRf7+t4dqrc0bRr0thdPrxAtCKWRm/F0tG71i6O1rvlnScncJLLBZHn3h8M3c1BSUAb9yu8g==", + "dev": true, "license": "Apache-2.0", "dependencies": { "@sigstore/bundle": "^2.3.2", @@ -13806,6 +13769,7 @@ "version": "2.3.1", "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-2.3.1.tgz", "integrity": "sha512-8G+/XDU8wNsJOQS5ysDVO0Etg9/2uA5gR9l4ZwijjlwxBcrU6RPfwi2+jJmbP+Ap1Hlp/nVAaEO4Fj22/SL2gQ==", + "dev": true, "license": "Apache-2.0", "dependencies": { "@sigstore/bundle": "^2.3.2", @@ -18319,7 +18283,7 @@ "npm-registry-fetch": "^18.0.1", "proc-log": "^5.0.0", "semver": "^7.3.7", - "sigstore": "^2.2.0", + "sigstore": "^3.0.0", "ssri": "^12.0.0" }, "devDependencies": { @@ -18334,6 +18298,115 @@ "node": "^18.17.0 || >=20.5.0" } }, + "workspaces/libnpmpublish/node_modules/@sigstore/bundle": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-3.0.0.tgz", + "integrity": "sha512-XDUYX56iMPAn/cdgh/DTJxz5RWmqKV4pwvUAEKEWJl+HzKdCd/24wUa9JYNMlDSCb7SUHAdtksxYX779Nne/Zg==", + "license": "Apache-2.0", + "dependencies": { + "@sigstore/protobuf-specs": "^0.3.2" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "workspaces/libnpmpublish/node_modules/@sigstore/core": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@sigstore/core/-/core-2.0.0.tgz", + "integrity": "sha512-nYxaSb/MtlSI+JWcwTHQxyNmWeWrUXJJ/G4liLrGG7+tS4vAz6LF3xRXqLH6wPIVUoZQel2Fs4ddLx4NCpiIYg==", + "license": "Apache-2.0", + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "workspaces/libnpmpublish/node_modules/@sigstore/sign": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-3.0.0.tgz", + "integrity": "sha512-UjhDMQOkyDoktpXoc5YPJpJK6IooF2gayAr5LvXI4EL7O0vd58okgfRcxuaH+YTdhvb5aa1Q9f+WJ0c2sVuYIw==", + "license": "Apache-2.0", + "dependencies": { + "@sigstore/bundle": "^3.0.0", + "@sigstore/core": "^2.0.0", + "@sigstore/protobuf-specs": "^0.3.2", + "make-fetch-happen": "^14.0.1", + "proc-log": "^5.0.0", + "promise-retry": "^2.0.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "workspaces/libnpmpublish/node_modules/@sigstore/tuf": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-3.0.0.tgz", + "integrity": "sha512-9Xxy/8U5OFJu7s+OsHzI96IX/OzjF/zj0BSSaWhgJgTqtlBhQIV2xdrQI5qxLD7+CWWDepadnXAxzaZ3u9cvRw==", + "license": "Apache-2.0", + "dependencies": { + "@sigstore/protobuf-specs": "^0.3.2", + "tuf-js": "^3.0.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + 
"workspaces/libnpmpublish/node_modules/@sigstore/verify": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@sigstore/verify/-/verify-2.0.0.tgz", + "integrity": "sha512-Ggtq2GsJuxFNUvQzLoXqRwS4ceRfLAJnrIHUDrzAD0GgnOhwujJkKkxM/s5Bako07c3WtAs/sZo5PJq7VHjeDg==", + "license": "Apache-2.0", + "dependencies": { + "@sigstore/bundle": "^3.0.0", + "@sigstore/core": "^2.0.0", + "@sigstore/protobuf-specs": "^0.3.2" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "workspaces/libnpmpublish/node_modules/@tufjs/models": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-3.0.1.tgz", + "integrity": "sha512-UUYHISyhCU3ZgN8yaear3cGATHb3SMuKHsQ/nVbHXcmnBf+LzQ/cQfhNG+rfaSHgqGKNEm2cOCLVLELStUQ1JA==", + "license": "MIT", + "dependencies": { + "@tufjs/canonical-json": "2.0.0", + "minimatch": "^9.0.5" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "workspaces/libnpmpublish/node_modules/sigstore": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-3.0.0.tgz", + "integrity": "sha512-PHMifhh3EN4loMcHCz6l3v/luzgT3za+9f8subGgeMNjbJjzH4Ij/YoX3Gvu+kaouJRIlVdTHHCREADYf+ZteA==", + "license": "Apache-2.0", + "dependencies": { + "@sigstore/bundle": "^3.0.0", + "@sigstore/core": "^2.0.0", + "@sigstore/protobuf-specs": "^0.3.2", + "@sigstore/sign": "^3.0.0", + "@sigstore/tuf": "^3.0.0", + "@sigstore/verify": "^2.0.0" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "workspaces/libnpmpublish/node_modules/tuf-js": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-3.0.1.tgz", + "integrity": "sha512-+68OP1ZzSF84rTckf3FA95vJ1Zlx/uaXyiiKyPd1pA4rZNkpEvDAKmsu1xUSmbF/chCRYgZ6UZkDwC7PmzmAyA==", + "license": "MIT", + "dependencies": { + "@tufjs/models": "3.0.1", + "debug": "^4.3.6", + "make-fetch-happen": "^14.0.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, "workspaces/libnpmsearch": { "version": "8.0.0", "license": "ISC", diff --git a/workspaces/libnpmpublish/lib/publish.js b/workspaces/libnpmpublish/lib/publish.js index 93d546efb5f0e..2bcab4f3ba304 100644 --- a/workspaces/libnpmpublish/lib/publish.js +++ b/workspaces/libnpmpublish/lib/publish.js @@ -137,7 +137,7 @@ const buildMetadata = async (registry, manifest, tarballData, spec, opts) => { if (provenance === true) { await ensureProvenanceGeneration(registry, spec, opts) - provenanceBundle = await generateProvenance([subject], opts) + provenanceBundle = await generateProvenance([subject], { legacyCompatibility: true, ...opts }) /* eslint-disable-next-line max-len */ log.notice('publish', `Signed provenance statement with source and build information from ${ciInfo.name}`) diff --git a/workspaces/libnpmpublish/package.json b/workspaces/libnpmpublish/package.json index f63d50f4e7b9c..7819db2cadb56 100644 --- a/workspaces/libnpmpublish/package.json +++ b/workspaces/libnpmpublish/package.json @@ -45,7 +45,7 @@ "npm-registry-fetch": "^18.0.1", "proc-log": "^5.0.0", "semver": "^7.3.7", - "sigstore": "^2.2.0", + "sigstore": "^3.0.0", "ssri": "^12.0.0" }, "engines": { From 0903e69dff1e129d89a40454a5d14376ec8e8e3c Mon Sep 17 00:00:00 2001 From: Gar Date: Wed, 20 Nov 2024 09:53:41 -0800 Subject: [PATCH 15/16] chore: update arborist in mock-registry --- mock-registry/package.json | 2 +- package-lock.json | 692 +------------------------------------ 2 files changed, 4 insertions(+), 690 deletions(-) diff --git a/mock-registry/package.json b/mock-registry/package.json index f4c1687e5714d..37877086d7011 100644 --- 
a/mock-registry/package.json +++ b/mock-registry/package.json @@ -46,7 +46,7 @@ ] }, "devDependencies": { - "@npmcli/arborist": "^7.5.4", + "@npmcli/arborist": "^8.0.0", "@npmcli/eslint-config": "^5.0.1", "@npmcli/template-oss": "4.23.3", "json-stringify-safe": "^5.0.1", diff --git a/package-lock.json b/package-lock.json index 97780eb0cb31d..b748a0c968d46 100644 --- a/package-lock.json +++ b/package-lock.json @@ -228,7 +228,7 @@ "version": "1.0.0", "license": "ISC", "devDependencies": { - "@npmcli/arborist": "^7.5.4", + "@npmcli/arborist": "^8.0.0", "@npmcli/eslint-config": "^5.0.1", "@npmcli/template-oss": "4.23.3", "json-stringify-safe": "^5.0.1", @@ -241,28 +241,11 @@ "node": "^18.17.0 || >=20.5.0" } }, - "mock-registry/node_modules/@npmcli/agent": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-2.2.2.tgz", - "integrity": "sha512-OrcNPXdpSl9UX7qPVRWbmWMCSXrcDa2M9DvrbOTj7ao1S4PlqVFYv9/yLKMkrJKZ/V5A/kDBC690or307i26Og==", - "dev": true, - "license": "ISC", - "dependencies": { - "agent-base": "^7.1.0", - "http-proxy-agent": "^7.0.0", - "https-proxy-agent": "^7.0.1", - "lru-cache": "^10.0.1", - "socks-proxy-agent": "^8.0.3" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, "mock-registry/node_modules/@npmcli/arborist/node_modules/npm-package-arg": { "version": "11.0.3", "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-11.0.3.tgz", "integrity": "sha512-sHGJy8sOC1YraBywpzQlIKBE4pBbGbiF95U6Auspzyem956E0+FtDtsx1ZxlOJkQCZ1AFXAY/yuvtFYrOxF+Bw==", - "dev": true, + "extraneous": true, "license": "ISC", "dependencies": { "hosted-git-info": "^7.0.0", @@ -278,139 +261,7 @@ "version": "18.0.6", "resolved": "https://registry.npmjs.org/pacote/-/pacote-18.0.6.tgz", "integrity": "sha512-+eK3G27SMwsB8kLIuj4h1FUhHtwiEUo21Tw8wNjmvdlpOEr613edv+8FUsTj/4F/VN5ywGE19X18N7CC2EJk6A==", - "dev": true, - "license": "ISC", - "dependencies": { - "@npmcli/git": "^5.0.0", - "@npmcli/installed-package-contents": "^2.0.1", - "@npmcli/package-json": "^5.1.0", - "@npmcli/promise-spawn": "^7.0.0", - "@npmcli/run-script": "^8.0.0", - "cacache": "^18.0.0", - "fs-minipass": "^3.0.0", - "minipass": "^7.0.2", - "npm-package-arg": "^11.0.0", - "npm-packlist": "^8.0.0", - "npm-pick-manifest": "^9.0.0", - "npm-registry-fetch": "^17.0.0", - "proc-log": "^4.0.0", - "promise-retry": "^2.0.1", - "sigstore": "^2.2.0", - "ssri": "^10.0.0", - "tar": "^6.1.11" - }, - "bin": { - "pacote": "bin/index.js" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/@npmcli/fs": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-3.1.1.tgz", - "integrity": "sha512-q9CRWjpHCMIh5sVyefoD1cA7PkvILqCZsnSOEUUivORLjxCO/Irmue2DprETiNgEqktDBZaM1Bi+jrarx1XdCg==", - "dev": true, - "license": "ISC", - "dependencies": { - "semver": "^7.3.5" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/@npmcli/git": { - "version": "5.0.8", - "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-5.0.8.tgz", - "integrity": "sha512-liASfw5cqhjNW9UFd+ruwwdEf/lbOAQjLL2XY2dFW/bkJheXDYZgOyul/4gVvEV4BWkTXjYGmDqMw9uegdbJNQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "@npmcli/promise-spawn": "^7.0.0", - "ini": "^4.1.3", - "lru-cache": "^10.0.1", - "npm-pick-manifest": "^9.0.0", - "proc-log": "^4.0.0", - "promise-inflight": "^1.0.1", - "promise-retry": "^2.0.1", - "semver": "^7.3.5", - "which": "^4.0.0" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - 
"mock-registry/node_modules/@npmcli/installed-package-contents": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@npmcli/installed-package-contents/-/installed-package-contents-2.1.0.tgz", - "integrity": "sha512-c8UuGLeZpm69BryRykLuKRyKFZYJsZSCT4aVY5ds4omyZqJ172ApzgfKJ5eV/r3HgLdUYgFVe54KSFVjKoe27w==", - "dev": true, - "license": "ISC", - "dependencies": { - "npm-bundled": "^3.0.0", - "npm-normalize-package-bin": "^3.0.0" - }, - "bin": { - "installed-package-contents": "bin/index.js" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/@npmcli/map-workspaces": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/@npmcli/map-workspaces/-/map-workspaces-3.0.6.tgz", - "integrity": "sha512-tkYs0OYnzQm6iIRdfy+LcLBjcKuQCeE5YLb8KnrIlutJfheNaPvPpgoFEyEFgbjzl5PLZ3IA/BWAwRU0eHuQDA==", - "dev": true, - "license": "ISC", - "dependencies": { - "@npmcli/name-from-folder": "^2.0.0", - "glob": "^10.2.2", - "minimatch": "^9.0.0", - "read-package-json-fast": "^3.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/@npmcli/metavuln-calculator": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/@npmcli/metavuln-calculator/-/metavuln-calculator-7.1.1.tgz", - "integrity": "sha512-Nkxf96V0lAx3HCpVda7Vw4P23RILgdi/5K1fmj2tZkWIYLpXAN8k2UVVOsW16TsS5F8Ws2I7Cm+PU1/rsVF47g==", - "dev": true, - "license": "ISC", - "dependencies": { - "cacache": "^18.0.0", - "json-parse-even-better-errors": "^3.0.0", - "pacote": "^18.0.0", - "proc-log": "^4.1.0", - "semver": "^7.3.5" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg": { - "version": "11.0.3", - "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-11.0.3.tgz", - "integrity": "sha512-sHGJy8sOC1YraBywpzQlIKBE4pBbGbiF95U6Auspzyem956E0+FtDtsx1ZxlOJkQCZ1AFXAY/yuvtFYrOxF+Bw==", - "dev": true, - "license": "ISC", - "dependencies": { - "hosted-git-info": "^7.0.0", - "proc-log": "^4.0.0", - "semver": "^7.3.5", - "validate-npm-package-name": "^5.0.0" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/@npmcli/metavuln-calculator/node_modules/pacote": { - "version": "18.0.6", - "resolved": "https://registry.npmjs.org/pacote/-/pacote-18.0.6.tgz", - "integrity": "sha512-+eK3G27SMwsB8kLIuj4h1FUhHtwiEUo21Tw8wNjmvdlpOEr613edv+8FUsTj/4F/VN5ywGE19X18N7CC2EJk6A==", - "dev": true, + "extraneous": true, "license": "ISC", "dependencies": { "@npmcli/git": "^5.0.0", @@ -438,543 +289,6 @@ "node": "^16.14.0 || >=18.0.0" } }, - "mock-registry/node_modules/@npmcli/name-from-folder": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@npmcli/name-from-folder/-/name-from-folder-2.0.0.tgz", - "integrity": "sha512-pwK+BfEBZJbKdNYpHHRTNBwBoqrN/iIMO0AiGvYsp3Hoaq0WbgGSWQR6SCldZovoDpY3yje5lkFUe6gsDgJ2vg==", - "dev": true, - "license": "ISC", - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/@npmcli/node-gyp": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@npmcli/node-gyp/-/node-gyp-3.0.0.tgz", - "integrity": "sha512-gp8pRXC2oOxu0DUE1/M3bYtb1b3/DbJ5aM113+XJBgfXdussRAsX0YOrOhdd8WvnAR6auDBvJomGAkLKA5ydxA==", - "dev": true, - "license": "ISC", - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/@npmcli/package-json": { - "version": "5.2.1", - "resolved": 
"https://registry.npmjs.org/@npmcli/package-json/-/package-json-5.2.1.tgz", - "integrity": "sha512-f7zYC6kQautXHvNbLEWgD/uGu1+xCn9izgqBfgItWSx22U0ZDekxN08A1vM8cTxj/cRVe0Q94Ode+tdoYmIOOQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "@npmcli/git": "^5.0.0", - "glob": "^10.2.2", - "hosted-git-info": "^7.0.0", - "json-parse-even-better-errors": "^3.0.0", - "normalize-package-data": "^6.0.0", - "proc-log": "^4.0.0", - "semver": "^7.5.3" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/@npmcli/promise-spawn": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-7.0.2.tgz", - "integrity": "sha512-xhfYPXoV5Dy4UkY0D+v2KkwvnDfiA/8Mt3sWCGI/hM03NsYIH8ZaG6QzS9x7pje5vHZBZJ2v6VRFVTWACnqcmQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "which": "^4.0.0" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/@npmcli/query": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/@npmcli/query/-/query-3.1.0.tgz", - "integrity": "sha512-C/iR0tk7KSKGldibYIB9x8GtO/0Bd0I2mhOaDb8ucQL/bQVTmGoeREaFj64Z5+iCBRf3dQfed0CjJL7I8iTkiQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "postcss-selector-parser": "^6.0.10" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/@npmcli/redact": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@npmcli/redact/-/redact-2.0.1.tgz", - "integrity": "sha512-YgsR5jCQZhVmTJvjduTOIHph0L73pK8xwMVaDY0PatySqVM9AZj93jpoXYSJqfHFxFkN9dmqTw6OiqExsS3LPw==", - "dev": true, - "license": "ISC", - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/@npmcli/run-script": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-8.1.0.tgz", - "integrity": "sha512-y7efHHwghQfk28G2z3tlZ67pLG0XdfYbcVG26r7YIXALRsrVQcTq4/tdenSmdOrEsNahIYA/eh8aEVROWGFUDg==", - "dev": true, - "license": "ISC", - "dependencies": { - "@npmcli/node-gyp": "^3.0.0", - "@npmcli/package-json": "^5.0.0", - "@npmcli/promise-spawn": "^7.0.0", - "node-gyp": "^10.0.0", - "proc-log": "^4.0.0", - "which": "^4.0.0" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/abbrev": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-2.0.0.tgz", - "integrity": "sha512-6/mh1E2u2YgEsCHdY0Yx5oW+61gZU+1vXaoiHHrpKeuRNNgFvS+/jrwHiQhB5apAf5oB7UB7E19ol2R2LKH8hQ==", - "dev": true, - "license": "ISC", - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/bin-links": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/bin-links/-/bin-links-4.0.4.tgz", - "integrity": "sha512-cMtq4W5ZsEwcutJrVId+a/tjt8GSbS+h0oNkdl6+6rBuEv8Ot33Bevj5KPm40t309zuhVic8NjpuL42QCiJWWA==", - "dev": true, - "license": "ISC", - "dependencies": { - "cmd-shim": "^6.0.0", - "npm-normalize-package-bin": "^3.0.0", - "read-cmd-shim": "^4.0.0", - "write-file-atomic": "^5.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/cacache": { - "version": "18.0.4", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-18.0.4.tgz", - "integrity": "sha512-B+L5iIa9mgcjLbliir2th36yEwPftrzteHYujzsx3dFP/31GCHcIeS8f5MGd80odLOjaOvSpU3EEAmRQptkxLQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "@npmcli/fs": "^3.1.0", - "fs-minipass": "^3.0.0", - "glob": "^10.2.2", - "lru-cache": "^10.0.1", - 
"minipass": "^7.0.3", - "minipass-collect": "^2.0.1", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "p-map": "^4.0.0", - "ssri": "^10.0.0", - "tar": "^6.1.11", - "unique-filename": "^3.0.0" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/cmd-shim": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/cmd-shim/-/cmd-shim-6.0.3.tgz", - "integrity": "sha512-FMabTRlc5t5zjdenF6mS0MBeFZm0XqHqeOkcskKFb/LYCcRQ5fVgLOHVc4Lq9CqABd9zhjwPjMBCJvMCziSVtA==", - "dev": true, - "license": "ISC", - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/hosted-git-info": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz", - "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==", - "dev": true, - "license": "ISC", - "dependencies": { - "lru-cache": "^10.0.1" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/ignore-walk": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-6.0.5.tgz", - "integrity": "sha512-VuuG0wCnjhnylG1ABXT3dAuIpTNDs/G8jlpmwXY03fXoXy/8ZK8/T+hMzt8L4WnrLCJgdybqgPagnF/f97cg3A==", - "dev": true, - "license": "ISC", - "dependencies": { - "minimatch": "^9.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/ini": { - "version": "4.1.3", - "resolved": "https://registry.npmjs.org/ini/-/ini-4.1.3.tgz", - "integrity": "sha512-X7rqawQBvfdjS10YU1y1YVreA3SsLrW9dX2CewP2EbBJM4ypVNLDkO5y04gejPwKIY9lR+7r9gn3rFPt/kmWFg==", - "dev": true, - "license": "ISC", - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/isexe": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz", - "integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=16" - } - }, - "mock-registry/node_modules/json-parse-even-better-errors": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-3.0.2.tgz", - "integrity": "sha512-fi0NG4bPjCHunUJffmLd0gxssIgkNmArMvis4iNah6Owg1MCJjWhEcDLmsK6iGkJq3tHwbDkTlce70/tmXN4cQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/make-fetch-happen": { - "version": "13.0.1", - "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-13.0.1.tgz", - "integrity": "sha512-cKTUFc/rbKUd/9meOvgrpJ2WrNzymt6jfRDdwg5UCnVzv9dTpEj9JS5m3wtziXVCjluIXyL8pcaukYqezIzZQA==", - "dev": true, - "license": "ISC", - "dependencies": { - "@npmcli/agent": "^2.0.0", - "cacache": "^18.0.0", - "http-cache-semantics": "^4.1.1", - "is-lambda": "^1.0.1", - "minipass": "^7.0.2", - "minipass-fetch": "^3.0.0", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "negotiator": "^0.6.3", - "proc-log": "^4.2.0", - "promise-retry": "^2.0.1", - "ssri": "^10.0.0" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/minipass-fetch": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-3.0.5.tgz", - "integrity": "sha512-2N8elDQAtSnFV0Dk7gt15KHsS0Fyz6CbYZ360h0WTYV1Ty46li3rAXVOQj1THMNLdmrD9Vt5pBPtWtVkpwGBqg==", - "dev": 
true, - "license": "MIT", - "dependencies": { - "minipass": "^7.0.3", - "minipass-sized": "^1.0.3", - "minizlib": "^2.1.2" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - }, - "optionalDependencies": { - "encoding": "^0.1.13" - } - }, - "mock-registry/node_modules/nopt": { - "version": "7.2.1", - "resolved": "https://registry.npmjs.org/nopt/-/nopt-7.2.1.tgz", - "integrity": "sha512-taM24ViiimT/XntxbPyJQzCG+p4EKOpgD3mxFwW38mGjVUrfERQOeY4EDHjdnptttfHuHQXFx+lTP08Q+mLa/w==", - "dev": true, - "license": "ISC", - "dependencies": { - "abbrev": "^2.0.0" - }, - "bin": { - "nopt": "bin/nopt.js" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/normalize-package-data": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-6.0.2.tgz", - "integrity": "sha512-V6gygoYb/5EmNI+MEGrWkC+e6+Rr7mTmfHrxDbLzxQogBkgzo76rkok0Am6thgSF7Mv2nLOajAJj5vDJZEFn7g==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "hosted-git-info": "^7.0.0", - "semver": "^7.3.5", - "validate-npm-package-license": "^3.0.4" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/npm-bundled": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-3.0.1.tgz", - "integrity": "sha512-+AvaheE/ww1JEwRHOrn4WHNzOxGtVp+adrg2AeZS/7KuxGUYFuBta98wYpfHBbJp6Tg6j1NKSEVHNcfZzJHQwQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "npm-normalize-package-bin": "^3.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/npm-install-checks": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-6.3.0.tgz", - "integrity": "sha512-W29RiK/xtpCGqn6f3ixfRYGk+zRyr+Ew9F2E20BfXxT5/euLdA/Nm7fO7OeTGuAmTs30cpgInyJ0cYe708YTZw==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "semver": "^7.1.1" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/npm-normalize-package-bin": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-3.0.1.tgz", - "integrity": "sha512-dMxCf+zZ+3zeQZXKxmyuCKlIDPGuv8EF940xbkC4kQVDTtqoh6rJFO+JTKSA6/Rwi0getWmtuy4Itup0AMcaDQ==", - "dev": true, - "license": "ISC", - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/npm-packlist": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-8.0.2.tgz", - "integrity": "sha512-shYrPFIS/JLP4oQmAwDyk5HcyysKW8/JLTEA32S0Z5TzvpaeeX2yMFfoK1fjEBnCBvVyIB/Jj/GBFdm0wsgzbA==", - "dev": true, - "license": "ISC", - "dependencies": { - "ignore-walk": "^6.0.4" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/npm-pick-manifest": { - "version": "9.1.0", - "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-9.1.0.tgz", - "integrity": "sha512-nkc+3pIIhqHVQr085X9d2JzPzLyjzQS96zbruppqC9aZRm/x8xx6xhI98gHtsfELP2bE+loHq8ZaHFHhe+NauA==", - "dev": true, - "license": "ISC", - "dependencies": { - "npm-install-checks": "^6.0.0", - "npm-normalize-package-bin": "^3.0.0", - "npm-package-arg": "^11.0.0", - "semver": "^7.3.5" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/npm-pick-manifest/node_modules/npm-package-arg": { - "version": "11.0.3", - "resolved": 
"https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-11.0.3.tgz", - "integrity": "sha512-sHGJy8sOC1YraBywpzQlIKBE4pBbGbiF95U6Auspzyem956E0+FtDtsx1ZxlOJkQCZ1AFXAY/yuvtFYrOxF+Bw==", - "dev": true, - "license": "ISC", - "dependencies": { - "hosted-git-info": "^7.0.0", - "proc-log": "^4.0.0", - "semver": "^7.3.5", - "validate-npm-package-name": "^5.0.0" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/npm-registry-fetch": { - "version": "17.1.0", - "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-17.1.0.tgz", - "integrity": "sha512-5+bKQRH0J1xG1uZ1zMNvxW0VEyoNWgJpY9UDuluPFLKDfJ9u2JmmjmTJV1srBGQOROfdBMiVvnH2Zvpbm+xkVA==", - "dev": true, - "license": "ISC", - "dependencies": { - "@npmcli/redact": "^2.0.0", - "jsonparse": "^1.3.1", - "make-fetch-happen": "^13.0.0", - "minipass": "^7.0.2", - "minipass-fetch": "^3.0.0", - "minizlib": "^2.1.2", - "npm-package-arg": "^11.0.0", - "proc-log": "^4.0.0" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/npm-registry-fetch/node_modules/npm-package-arg": { - "version": "11.0.3", - "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-11.0.3.tgz", - "integrity": "sha512-sHGJy8sOC1YraBywpzQlIKBE4pBbGbiF95U6Auspzyem956E0+FtDtsx1ZxlOJkQCZ1AFXAY/yuvtFYrOxF+Bw==", - "dev": true, - "license": "ISC", - "dependencies": { - "hosted-git-info": "^7.0.0", - "proc-log": "^4.0.0", - "semver": "^7.3.5", - "validate-npm-package-name": "^5.0.0" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/parse-conflict-json": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/parse-conflict-json/-/parse-conflict-json-3.0.1.tgz", - "integrity": "sha512-01TvEktc68vwbJOtWZluyWeVGWjP+bZwXtPDMQVbBKzbJ/vZBif0L69KH1+cHv1SZ6e0FKLvjyHe8mqsIqYOmw==", - "dev": true, - "license": "ISC", - "dependencies": { - "json-parse-even-better-errors": "^3.0.0", - "just-diff": "^6.0.0", - "just-diff-apply": "^5.2.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/proc-log": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz", - "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==", - "dev": true, - "license": "ISC", - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/proggy": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/proggy/-/proggy-2.0.0.tgz", - "integrity": "sha512-69agxLtnI8xBs9gUGqEnK26UfiexpHy+KUpBQWabiytQjnn5wFY8rklAi7GRfABIuPNnQ/ik48+LGLkYYJcy4A==", - "dev": true, - "license": "ISC", - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/read-cmd-shim": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/read-cmd-shim/-/read-cmd-shim-4.0.0.tgz", - "integrity": "sha512-yILWifhaSEEytfXI76kB9xEEiG1AiozaCJZ83A87ytjRiN+jVibXjedjCRNjoZviinhG+4UkalO3mWTd8u5O0Q==", - "dev": true, - "license": "ISC", - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/read-package-json-fast": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/read-package-json-fast/-/read-package-json-fast-3.0.2.tgz", - "integrity": "sha512-0J+Msgym3vrLOUB3hzQCuZHII0xkNGCtz/HJH9xZshwv9DbDwkw1KaE3gx/e2J5rpEY5rtOy6cyhKOPrkP7FZw==", - "dev": true, - "license": "ISC", - "dependencies": { - 
"json-parse-even-better-errors": "^3.0.0", - "npm-normalize-package-bin": "^3.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/ssri": { - "version": "10.0.6", - "resolved": "https://registry.npmjs.org/ssri/-/ssri-10.0.6.tgz", - "integrity": "sha512-MGrFH9Z4NP9Iyhqn16sDtBpRRNJ0Y2hNa6D65h736fVSaPCHr4DM4sWUNvVaSuC+0OBGhwsrydQwmgfg5LncqQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "minipass": "^7.0.3" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/unique-filename": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-3.0.0.tgz", - "integrity": "sha512-afXhuC55wkAmZ0P18QsVE6kp8JaxrEokN2HGIoIVv2ijHQd419H0+6EigAFcIzXeMIkcIkNBpB3L/DXB3cTS/g==", - "dev": true, - "license": "ISC", - "dependencies": { - "unique-slug": "^4.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/unique-slug": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-4.0.0.tgz", - "integrity": "sha512-WrcA6AyEfqDX5bWige/4NQfPZMtASNVxdmWR76WESYQVAACSgWcR6e9i0mofqqBxYFtL4oAxPIptY73/0YE1DQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "imurmurhash": "^0.1.4" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/validate-npm-package-name": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-5.0.1.tgz", - "integrity": "sha512-OljLrQ9SQdOUqTaQxqL5dEfZWrXExyyWsozYlAWFawPVNuD83igl7uJD2RTkNMbniIYgt8l81eCJGIdQF7avLQ==", - "dev": true, - "license": "ISC", - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/which": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/which/-/which-4.0.0.tgz", - "integrity": "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==", - "dev": true, - "license": "ISC", - "dependencies": { - "isexe": "^3.1.1" - }, - "bin": { - "node-which": "bin/which.js" - }, - "engines": { - "node": "^16.13.0 || >=18.0.0" - } - }, - "mock-registry/node_modules/write-file-atomic": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-5.0.1.tgz", - "integrity": "sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw==", - "dev": true, - "license": "ISC", - "dependencies": { - "imurmurhash": "^0.1.4", - "signal-exit": "^4.0.1" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, "node_modules/@actions/core": { "version": "1.10.1", "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.1.tgz", From ca250c53816982bb39614e280ac7f234cbfd65ca Mon Sep 17 00:00:00 2001 From: Gar Date: Wed, 20 Nov 2024 09:57:18 -0800 Subject: [PATCH 16/16] deps: @sigstore/tuf@3.0.0 --- node_modules/.gitignore | 50 +- .../@sigstore/tuf/dist/appdata.js | 43 - .../node_modules/@sigstore/tuf/dist/client.js | 111 --- .../node_modules/@sigstore/tuf/dist/error.js | 12 - .../node_modules/@sigstore/tuf/dist/index.js | 56 -- .../node_modules/@sigstore/tuf/dist/target.js | 79 -- .../node_modules/@sigstore/tuf/package.json | 41 - .../node_modules/@sigstore/tuf/seeds.json | 1 - .../node_modules/sigstore/LICENSE | 202 ----- .../node_modules/tuf-js/LICENSE | 21 - .../node_modules/tuf-js/dist/config.js | 15 - .../node_modules/tuf-js/dist/error.js | 
48 -- .../node_modules/tuf-js/dist/fetcher.js | 84 -- .../node_modules/tuf-js/dist/index.js | 9 - .../node_modules/tuf-js/dist/store.js | 208 ----- .../node_modules/tuf-js/dist/updater.js | 350 -------- .../node_modules/tuf-js/dist/utils/tmpfile.js | 25 - .../node_modules/tuf-js/dist/utils/url.js | 13 - .../node_modules/tuf-js/package.json | 43 - node_modules/@sigstore/tuf/dist/appdata.js | 3 +- node_modules/@sigstore/tuf/dist/client.js | 1 - node_modules/@sigstore/tuf/dist/index.js | 6 +- node_modules/@sigstore/tuf/dist/target.js | 3 +- node_modules/@sigstore/tuf/package.json | 8 +- node_modules/@sigstore/tuf/seeds.json | 2 +- node_modules/@tufjs/models/LICENSE | 21 - node_modules/@tufjs/models/dist/base.js | 83 -- .../@tufjs/models/dist/delegations.js | 115 --- node_modules/@tufjs/models/dist/error.js | 27 - node_modules/@tufjs/models/dist/file.js | 183 ----- node_modules/@tufjs/models/dist/index.js | 24 - node_modules/@tufjs/models/dist/key.js | 85 -- node_modules/@tufjs/models/dist/metadata.js | 158 ---- node_modules/@tufjs/models/dist/role.js | 299 ------- node_modules/@tufjs/models/dist/root.js | 116 --- node_modules/@tufjs/models/dist/signature.js | 38 - node_modules/@tufjs/models/dist/snapshot.js | 71 -- node_modules/@tufjs/models/dist/targets.js | 92 --- node_modules/@tufjs/models/dist/timestamp.js | 58 -- .../@tufjs/models/dist/utils/guard.js | 33 - .../@tufjs/models/dist/utils/index.js | 28 - node_modules/@tufjs/models/dist/utils/key.js | 143 ---- node_modules/@tufjs/models/dist/utils/oid.js | 27 - .../@tufjs/models/dist/utils/types.js | 2 - .../@tufjs/models/dist/utils/verify.js | 13 - node_modules/@tufjs/models/package.json | 37 - .../node_modules/@sigstore/bundle/LICENSE | 202 ----- .../@sigstore/bundle/dist/build.js | 100 --- .../@sigstore/bundle/dist/bundle.js | 24 - .../@sigstore/bundle/dist/error.js | 25 - .../@sigstore/bundle/dist/index.js | 43 - .../@sigstore/bundle/dist/serialized.js | 49 -- .../@sigstore/bundle/dist/utility.js | 2 - .../@sigstore/bundle/dist/validate.js | 199 ----- .../@sigstore/bundle/package.json | 35 - .../node_modules/@sigstore/core/LICENSE | 202 ----- .../@sigstore/core/dist/asn1/error.js | 24 - .../@sigstore/core/dist/asn1/index.js | 20 - .../@sigstore/core/dist/asn1/length.js | 62 -- .../@sigstore/core/dist/asn1/obj.js | 152 ---- .../@sigstore/core/dist/asn1/parse.js | 124 --- .../@sigstore/core/dist/asn1/tag.js | 86 -- .../@sigstore/core/dist/crypto.js | 60 -- .../node_modules/@sigstore/core/dist/dsse.js | 30 - .../@sigstore/core/dist/encoding.js | 27 - .../node_modules/@sigstore/core/dist/index.js | 56 -- .../node_modules/@sigstore/core/dist/json.js | 60 -- .../node_modules/@sigstore/core/dist/oid.js | 14 - .../node_modules/@sigstore/core/dist/pem.js | 43 - .../@sigstore/core/dist/rfc3161/error.js | 21 - .../@sigstore/core/dist/rfc3161/index.js | 20 - .../@sigstore/core/dist/rfc3161/timestamp.js | 201 ----- .../@sigstore/core/dist/rfc3161/tstinfo.js | 61 -- .../@sigstore/core/dist/stream.js | 115 --- .../@sigstore/core/dist/x509/cert.js | 230 ------ .../@sigstore/core/dist/x509/ext.js | 145 ---- .../@sigstore/core/dist/x509/index.js | 23 - .../@sigstore/core/dist/x509/sct.js | 141 ---- .../node_modules/@sigstore/core/package.json | 31 - .../node_modules/@sigstore/sign/LICENSE | 202 ----- .../@sigstore/sign/dist/bundler/base.js | 50 -- .../@sigstore/sign/dist/bundler/bundle.js | 71 -- .../@sigstore/sign/dist/bundler/dsse.js | 46 -- .../@sigstore/sign/dist/bundler/index.js | 7 - .../@sigstore/sign/dist/bundler/message.js | 30 - 
.../node_modules/@sigstore/sign/dist/error.js | 39 - .../@sigstore/sign/dist/external/error.js | 26 - .../@sigstore/sign/dist/external/fetch.js | 98 --- .../@sigstore/sign/dist/external/fulcio.js | 41 - .../@sigstore/sign/dist/external/rekor.js | 80 -- .../@sigstore/sign/dist/external/tsa.js | 38 - .../@sigstore/sign/dist/identity/ci.js | 73 -- .../@sigstore/sign/dist/identity/index.js | 20 - .../@sigstore/sign/dist/identity/provider.js | 2 - .../node_modules/@sigstore/sign/dist/index.js | 17 - .../@sigstore/sign/dist/signer/fulcio/ca.js | 59 -- .../sign/dist/signer/fulcio/ephemeral.js | 45 -- .../sign/dist/signer/fulcio/index.js | 87 -- .../@sigstore/sign/dist/signer/index.js | 22 - .../@sigstore/sign/dist/signer/signer.js | 17 - .../@sigstore/sign/dist/types/fetch.js | 2 - .../@sigstore/sign/dist/util/index.js | 49 -- .../@sigstore/sign/dist/util/oidc.js | 30 - .../@sigstore/sign/dist/util/ua.js | 32 - .../@sigstore/sign/dist/witness/index.js | 24 - .../sign/dist/witness/tlog/client.js | 61 -- .../@sigstore/sign/dist/witness/tlog/entry.js | 140 ---- .../@sigstore/sign/dist/witness/tlog/index.js | 82 -- .../@sigstore/sign/dist/witness/tsa/client.js | 46 -- .../@sigstore/sign/dist/witness/tsa/index.js | 44 - .../@sigstore/sign/dist/witness/witness.js | 2 - .../node_modules/@sigstore/sign/package.json | 46 -- .../pacote/node_modules/@sigstore/tuf/LICENSE | 202 ----- .../@sigstore/tuf/dist/appdata.js | 43 - .../node_modules/@sigstore/tuf/dist/client.js | 111 --- .../node_modules/@sigstore/tuf/dist/error.js | 12 - .../node_modules/@sigstore/tuf/dist/index.js | 56 -- .../node_modules/@sigstore/tuf/dist/target.js | 79 -- .../node_modules/@sigstore/tuf/package.json | 41 - .../node_modules/@sigstore/tuf/seeds.json | 1 - .../@sigstore/verify/dist/bundle/dsse.js | 43 - .../@sigstore/verify/dist/bundle/index.js | 57 -- .../@sigstore/verify/dist/bundle/message.js | 36 - .../@sigstore/verify/dist/error.js | 32 - .../@sigstore/verify/dist/index.js | 28 - .../@sigstore/verify/dist/key/certificate.js | 205 ----- .../@sigstore/verify/dist/key/index.js | 72 -- .../@sigstore/verify/dist/key/sct.js | 78 -- .../@sigstore/verify/dist/policy.js | 24 - .../@sigstore/verify/dist/shared.types.js | 2 - .../verify/dist/timestamp/checkpoint.js | 157 ---- .../@sigstore/verify/dist/timestamp/index.js | 46 -- .../@sigstore/verify/dist/timestamp/merkle.js | 104 --- .../@sigstore/verify/dist/timestamp/set.js | 60 -- .../@sigstore/verify/dist/timestamp/tsa.js | 73 -- .../@sigstore/verify/dist/tlog/dsse.js | 57 -- .../verify/dist/tlog/hashedrekord.js | 51 -- .../@sigstore/verify/dist/tlog/index.js | 47 -- .../@sigstore/verify/dist/tlog/intoto.js | 62 -- .../@sigstore/verify/dist/trust/filter.js | 23 - .../@sigstore/verify/dist/trust/index.js | 86 -- .../verify/dist/trust/trust.types.js | 2 - .../@sigstore/verify/dist/verifier.js | 141 ---- .../@sigstore/verify/package.json | 36 - .../pacote/node_modules/@tufjs/models/LICENSE | 21 - .../node_modules/@tufjs/models/dist/base.js | 92 --- .../@tufjs/models/dist/delegations.js | 115 --- .../node_modules/@tufjs/models/dist/error.js | 27 - .../node_modules/@tufjs/models/dist/file.js | 183 ----- .../node_modules/@tufjs/models/dist/index.js | 24 - .../node_modules/@tufjs/models/dist/key.js | 85 -- .../@tufjs/models/dist/metadata.js | 160 ---- .../node_modules/@tufjs/models/dist/role.js | 299 ------- .../node_modules/@tufjs/models/dist/root.js | 116 --- .../@tufjs/models/dist/signature.js | 38 - .../@tufjs/models/dist/snapshot.js | 71 -- .../@tufjs/models/dist/targets.js | 92 --- 
.../@tufjs/models/dist/timestamp.js | 58 -- .../@tufjs/models/dist/utils/guard.js | 32 - .../@tufjs/models/dist/utils/index.js | 28 - .../@tufjs/models/dist/utils/key.js | 142 ---- .../@tufjs/models/dist/utils/oid.js | 26 - .../@tufjs/models/dist/utils/types.js | 2 - .../@tufjs/models/dist/utils/verify.js | 13 - .../node_modules/@tufjs/models/package.json | 37 - .../pacote/node_modules/sigstore/LICENSE | 202 ----- .../node_modules/sigstore/dist/config.js | 120 --- .../node_modules/sigstore/dist/index.js | 34 - .../node_modules/sigstore/dist/sigstore.js | 102 --- .../pacote/node_modules/sigstore/package.json | 47 -- .../pacote/node_modules/tuf-js/LICENSE | 21 - .../pacote/node_modules/tuf-js/dist/config.js | 15 - .../pacote/node_modules/tuf-js/dist/error.js | 48 -- .../node_modules/tuf-js/dist/fetcher.js | 84 -- .../pacote/node_modules/tuf-js/dist/index.js | 9 - .../pacote/node_modules/tuf-js/dist/store.js | 208 ----- .../node_modules/tuf-js/dist/updater.js | 350 -------- .../node_modules/tuf-js/dist/utils/tmpfile.js | 25 - .../node_modules/tuf-js/dist/utils/url.js | 13 - .../pacote/node_modules/tuf-js/package.json | 43 - .../@sigstore/bundle => sigstore}/LICENSE | 0 .../node_modules => }/sigstore/dist/config.js | 0 .../node_modules => }/sigstore/dist/index.js | 0 .../sigstore/dist/sigstore.js | 0 .../node_modules/@sigstore/bundle}/LICENSE | 0 .../@sigstore/bundle/dist/build.js | 0 .../@sigstore/bundle/dist/bundle.js | 0 .../@sigstore/bundle/dist/error.js | 0 .../@sigstore/bundle/dist/index.js | 0 .../@sigstore/bundle/dist/serialized.js | 0 .../@sigstore/bundle/dist/utility.js | 0 .../@sigstore/bundle/dist/validate.js | 0 .../@sigstore/bundle/package.json | 0 .../node_modules/@sigstore/core}/LICENSE | 0 .../@sigstore/core/dist/asn1/error.js | 0 .../@sigstore/core/dist/asn1/index.js | 0 .../@sigstore/core/dist/asn1/length.js | 0 .../@sigstore/core/dist/asn1/obj.js | 0 .../@sigstore/core/dist/asn1/parse.js | 0 .../@sigstore/core/dist/asn1/tag.js | 0 .../@sigstore/core/dist/crypto.js | 0 .../node_modules/@sigstore/core/dist/dsse.js | 0 .../@sigstore/core/dist/encoding.js | 0 .../node_modules/@sigstore/core/dist/index.js | 0 .../node_modules/@sigstore/core/dist/json.js | 0 .../node_modules/@sigstore/core/dist/oid.js | 0 .../node_modules/@sigstore/core/dist/pem.js | 0 .../@sigstore/core/dist/rfc3161/error.js | 0 .../@sigstore/core/dist/rfc3161/index.js | 0 .../@sigstore/core/dist/rfc3161/timestamp.js | 0 .../@sigstore/core/dist/rfc3161/tstinfo.js | 0 .../@sigstore/core/dist/stream.js | 0 .../@sigstore/core/dist/x509/cert.js | 0 .../@sigstore/core/dist/x509/ext.js | 0 .../@sigstore/core/dist/x509/index.js | 0 .../@sigstore/core/dist/x509/sct.js | 0 .../node_modules/@sigstore/core/package.json | 0 .../node_modules/@sigstore/sign}/LICENSE | 0 .../@sigstore/sign/dist/bundler/base.js | 0 .../@sigstore/sign/dist/bundler/bundle.js | 0 .../@sigstore/sign/dist/bundler/dsse.js | 0 .../@sigstore/sign/dist/bundler/index.js | 0 .../@sigstore/sign/dist/bundler/message.js | 0 .../node_modules/@sigstore/sign/dist/error.js | 0 .../@sigstore/sign/dist/external/error.js | 0 .../@sigstore/sign/dist/external/fetch.js | 0 .../@sigstore/sign/dist/external/fulcio.js | 0 .../@sigstore/sign/dist/external/rekor.js | 0 .../@sigstore/sign/dist/external/tsa.js | 0 .../@sigstore/sign/dist/identity/ci.js | 0 .../@sigstore/sign/dist/identity/index.js | 0 .../@sigstore/sign/dist/identity/provider.js | 0 .../node_modules/@sigstore/sign/dist/index.js | 0 .../@sigstore/sign/dist/signer/fulcio/ca.js | 0 
.../sign/dist/signer/fulcio/ephemeral.js | 0 .../sign/dist/signer/fulcio/index.js | 0 .../@sigstore/sign/dist/signer/index.js | 0 .../@sigstore/sign/dist/signer/signer.js | 0 .../@sigstore/sign/dist/types/fetch.js | 0 .../@sigstore/sign/dist/util/index.js | 0 .../@sigstore/sign/dist/util/oidc.js | 0 .../@sigstore/sign/dist/util/ua.js | 0 .../@sigstore/sign/dist/witness/index.js | 0 .../sign/dist/witness/tlog/client.js | 0 .../@sigstore/sign/dist/witness/tlog/entry.js | 0 .../@sigstore/sign/dist/witness/tlog/index.js | 0 .../@sigstore/sign/dist/witness/tsa/client.js | 0 .../@sigstore/sign/dist/witness/tsa/index.js | 0 .../@sigstore/sign/dist/witness/witness.js | 0 .../node_modules/@sigstore/sign/package.json | 0 .../@sigstore/verify/dist/bundle/dsse.js | 0 .../@sigstore/verify/dist/bundle/index.js | 0 .../@sigstore/verify/dist/bundle/message.js | 0 .../@sigstore/verify/dist/error.js | 0 .../@sigstore/verify/dist/index.js | 0 .../@sigstore/verify/dist/key/certificate.js | 0 .../@sigstore/verify/dist/key/index.js | 0 .../@sigstore/verify/dist/key/sct.js | 0 .../@sigstore/verify/dist/policy.js | 0 .../@sigstore/verify/dist/shared.types.js | 0 .../verify/dist/timestamp/checkpoint.js | 0 .../@sigstore/verify/dist/timestamp/index.js | 0 .../@sigstore/verify/dist/timestamp/merkle.js | 0 .../@sigstore/verify/dist/timestamp/set.js | 0 .../@sigstore/verify/dist/timestamp/tsa.js | 0 .../@sigstore/verify/dist/tlog/dsse.js | 0 .../verify/dist/tlog/hashedrekord.js | 0 .../@sigstore/verify/dist/tlog/index.js | 0 .../@sigstore/verify/dist/tlog/intoto.js | 0 .../@sigstore/verify/dist/trust/filter.js | 0 .../@sigstore/verify/dist/trust/index.js | 0 .../verify/dist/trust/trust.types.js | 0 .../@sigstore/verify/dist/verifier.js | 0 .../@sigstore/verify/package.json | 0 .../node_modules => }/sigstore/package.json | 0 node_modules/tuf-js/dist/config.js | 2 +- node_modules/tuf-js/dist/updater.js | 19 +- node_modules/tuf-js/dist/utils/url.js | 3 +- .../node_modules/@npmcli/agent/lib/agents.js | 206 ----- .../node_modules/@npmcli/agent/lib/dns.js | 53 -- .../node_modules/@npmcli/agent/lib/errors.js | 61 -- .../node_modules/@npmcli/agent/lib/index.js | 56 -- .../node_modules/@npmcli/agent/lib/options.js | 86 -- .../node_modules/@npmcli/agent/lib/proxy.js | 88 -- .../node_modules/@npmcli/agent/package.json | 60 -- .../tuf-js/node_modules/@npmcli/fs/LICENSE.md | 20 - .../@npmcli/fs/lib/common/get-options.js | 20 - .../@npmcli/fs/lib/common/node.js | 9 - .../node_modules/@npmcli/fs/lib/cp/LICENSE | 15 - .../node_modules/@npmcli/fs/lib/cp/errors.js | 129 --- .../node_modules/@npmcli/fs/lib/cp/index.js | 22 - .../@npmcli/fs/lib/cp/polyfill.js | 428 ---------- .../node_modules/@npmcli/fs/lib/index.js | 13 - .../node_modules/@npmcli/fs/lib/move-file.js | 78 -- .../@npmcli/fs/lib/readdir-scoped.js | 20 - .../@npmcli/fs/lib/with-temp-dir.js | 39 - .../node_modules/@npmcli/fs/package.json | 52 -- .../node_modules/@tufjs/models/LICENSE | 0 .../node_modules/@tufjs/models/dist/base.js | 0 .../@tufjs/models/dist/delegations.js | 0 .../node_modules/@tufjs/models/dist/error.js | 0 .../node_modules/@tufjs/models/dist/file.js | 0 .../node_modules/@tufjs/models/dist/index.js | 0 .../node_modules/@tufjs/models/dist/key.js | 0 .../@tufjs/models/dist/metadata.js | 0 .../node_modules/@tufjs/models/dist/role.js | 0 .../node_modules/@tufjs/models/dist/root.js | 0 .../@tufjs/models/dist/signature.js | 0 .../@tufjs/models/dist/snapshot.js | 0 .../@tufjs/models/dist/targets.js | 0 .../@tufjs/models/dist/timestamp.js | 0 
.../@tufjs/models/dist/utils/guard.js | 0 .../@tufjs/models/dist/utils/index.js | 0 .../@tufjs/models/dist/utils/key.js | 0 .../@tufjs/models/dist/utils/oid.js | 0 .../@tufjs/models/dist/utils/types.js | 0 .../@tufjs/models/dist/utils/verify.js | 0 .../node_modules/@tufjs/models/package.json | 0 .../tuf-js/node_modules/cacache/LICENSE.md | 16 - .../node_modules/cacache/lib/content/path.js | 29 - .../node_modules/cacache/lib/content/read.js | 165 ---- .../node_modules/cacache/lib/content/rm.js | 18 - .../node_modules/cacache/lib/content/write.js | 206 ----- .../node_modules/cacache/lib/entry-index.js | 336 -------- .../tuf-js/node_modules/cacache/lib/get.js | 170 ---- .../tuf-js/node_modules/cacache/lib/index.js | 42 - .../node_modules/cacache/lib/memoization.js | 72 -- .../tuf-js/node_modules/cacache/lib/put.js | 80 -- .../tuf-js/node_modules/cacache/lib/rm.js | 31 - .../node_modules/cacache/lib/util/glob.js | 7 - .../cacache/lib/util/hash-to-segments.js | 7 - .../node_modules/cacache/lib/util/tmp.js | 26 - .../tuf-js/node_modules/cacache/lib/verify.js | 257 ------ .../tuf-js/node_modules/cacache/package.json | 82 -- .../node_modules/make-fetch-happen/LICENSE | 16 - .../make-fetch-happen/lib/cache/entry.js | 471 ----------- .../make-fetch-happen/lib/cache/errors.js | 11 - .../make-fetch-happen/lib/cache/index.js | 49 -- .../make-fetch-happen/lib/cache/key.js | 17 - .../make-fetch-happen/lib/cache/policy.js | 161 ---- .../make-fetch-happen/lib/fetch.js | 118 --- .../make-fetch-happen/lib/index.js | 41 - .../make-fetch-happen/lib/options.js | 54 -- .../make-fetch-happen/lib/pipeline.js | 41 - .../make-fetch-happen/lib/remote.js | 131 --- .../make-fetch-happen/package.json | 75 -- .../node_modules/minipass-fetch/LICENSE | 28 - .../minipass-fetch/lib/abort-error.js | 17 - .../node_modules/minipass-fetch/lib/blob.js | 97 --- .../node_modules/minipass-fetch/lib/body.js | 350 -------- .../minipass-fetch/lib/fetch-error.js | 32 - .../minipass-fetch/lib/headers.js | 267 ------- .../node_modules/minipass-fetch/lib/index.js | 377 --------- .../minipass-fetch/lib/request.js | 282 ------- .../minipass-fetch/lib/response.js | 90 --- .../node_modules/minipass-fetch/package.json | 69 -- .../tuf-js/node_modules/proc-log/LICENSE | 15 - .../tuf-js/node_modules/proc-log/lib/index.js | 153 ---- .../tuf-js/node_modules/proc-log/package.json | 45 -- .../tuf-js/node_modules/ssri/LICENSE.md | 16 - .../tuf-js/node_modules/ssri/lib/index.js | 580 -------------- .../tuf-js/node_modules/ssri/package.json | 65 -- .../node_modules/unique-filename/LICENSE | 5 - .../node_modules/unique-filename/lib/index.js | 7 - .../node_modules/unique-filename/package.json | 51 -- .../tuf-js/node_modules/unique-slug/LICENSE | 15 - .../node_modules/unique-slug/lib/index.js | 11 - .../node_modules/unique-slug/package.json | 47 -- node_modules/tuf-js/package.json | 14 +- package-lock.json | 755 +++++------------- package.json | 2 +- 371 files changed, 241 insertions(+), 19748 deletions(-) delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/appdata.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/client.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/error.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/index.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/target.js delete mode 100644 
node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/package.json delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/seeds.json delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/sigstore/LICENSE delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/LICENSE delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/config.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/error.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/fetcher.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/index.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/store.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/updater.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/utils/tmpfile.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/utils/url.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/package.json delete mode 100644 node_modules/@tufjs/models/LICENSE delete mode 100644 node_modules/@tufjs/models/dist/base.js delete mode 100644 node_modules/@tufjs/models/dist/delegations.js delete mode 100644 node_modules/@tufjs/models/dist/error.js delete mode 100644 node_modules/@tufjs/models/dist/file.js delete mode 100644 node_modules/@tufjs/models/dist/index.js delete mode 100644 node_modules/@tufjs/models/dist/key.js delete mode 100644 node_modules/@tufjs/models/dist/metadata.js delete mode 100644 node_modules/@tufjs/models/dist/role.js delete mode 100644 node_modules/@tufjs/models/dist/root.js delete mode 100644 node_modules/@tufjs/models/dist/signature.js delete mode 100644 node_modules/@tufjs/models/dist/snapshot.js delete mode 100644 node_modules/@tufjs/models/dist/targets.js delete mode 100644 node_modules/@tufjs/models/dist/timestamp.js delete mode 100644 node_modules/@tufjs/models/dist/utils/guard.js delete mode 100644 node_modules/@tufjs/models/dist/utils/index.js delete mode 100644 node_modules/@tufjs/models/dist/utils/key.js delete mode 100644 node_modules/@tufjs/models/dist/utils/oid.js delete mode 100644 node_modules/@tufjs/models/dist/utils/types.js delete mode 100644 node_modules/@tufjs/models/dist/utils/verify.js delete mode 100644 node_modules/@tufjs/models/package.json delete mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/LICENSE delete mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/dist/build.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/dist/bundle.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/dist/error.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/dist/index.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/dist/serialized.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/dist/utility.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/dist/validate.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/package.json delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/LICENSE delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/asn1/error.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/asn1/index.js delete mode 100644 
node_modules/pacote/node_modules/@sigstore/core/dist/asn1/length.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/asn1/obj.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/asn1/parse.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/asn1/tag.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/crypto.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/dsse.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/encoding.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/index.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/json.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/oid.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/pem.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/error.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/index.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/timestamp.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/stream.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/x509/cert.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/x509/ext.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/x509/index.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/x509/sct.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/package.json delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/LICENSE delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/base.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/bundle.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/dsse.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/index.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/message.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/error.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/external/error.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/external/fetch.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/external/fulcio.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/external/rekor.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/external/tsa.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/identity/ci.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/identity/index.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/identity/provider.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/index.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/index.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/signer/index.js delete mode 100644 
node_modules/pacote/node_modules/@sigstore/sign/dist/signer/signer.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/types/fetch.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/util/index.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/util/oidc.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/util/ua.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/witness/index.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/client.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/entry.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/index.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tsa/client.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tsa/index.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/witness/witness.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/package.json delete mode 100644 node_modules/pacote/node_modules/@sigstore/tuf/LICENSE delete mode 100644 node_modules/pacote/node_modules/@sigstore/tuf/dist/appdata.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/tuf/dist/client.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/tuf/dist/error.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/tuf/dist/index.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/tuf/dist/target.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/tuf/package.json delete mode 100644 node_modules/pacote/node_modules/@sigstore/tuf/seeds.json delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/dsse.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/index.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/message.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/error.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/index.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/key/certificate.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/key/index.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/key/sct.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/policy.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/shared.types.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/index.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/merkle.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/set.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/tsa.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/dsse.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/index.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/intoto.js delete mode 100644 
node_modules/pacote/node_modules/@sigstore/verify/dist/trust/filter.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/trust/index.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/trust/trust.types.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/verifier.js delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/package.json delete mode 100644 node_modules/pacote/node_modules/@tufjs/models/LICENSE delete mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/base.js delete mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/delegations.js delete mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/error.js delete mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/file.js delete mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/index.js delete mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/key.js delete mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/metadata.js delete mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/role.js delete mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/root.js delete mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/signature.js delete mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/snapshot.js delete mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/targets.js delete mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/timestamp.js delete mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/utils/guard.js delete mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/utils/index.js delete mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/utils/key.js delete mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/utils/oid.js delete mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/utils/types.js delete mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/utils/verify.js delete mode 100644 node_modules/pacote/node_modules/@tufjs/models/package.json delete mode 100644 node_modules/pacote/node_modules/sigstore/LICENSE delete mode 100644 node_modules/pacote/node_modules/sigstore/dist/config.js delete mode 100644 node_modules/pacote/node_modules/sigstore/dist/index.js delete mode 100644 node_modules/pacote/node_modules/sigstore/dist/sigstore.js delete mode 100644 node_modules/pacote/node_modules/sigstore/package.json delete mode 100644 node_modules/pacote/node_modules/tuf-js/LICENSE delete mode 100644 node_modules/pacote/node_modules/tuf-js/dist/config.js delete mode 100644 node_modules/pacote/node_modules/tuf-js/dist/error.js delete mode 100644 node_modules/pacote/node_modules/tuf-js/dist/fetcher.js delete mode 100644 node_modules/pacote/node_modules/tuf-js/dist/index.js delete mode 100644 node_modules/pacote/node_modules/tuf-js/dist/store.js delete mode 100644 node_modules/pacote/node_modules/tuf-js/dist/updater.js delete mode 100644 node_modules/pacote/node_modules/tuf-js/dist/utils/tmpfile.js delete mode 100644 node_modules/pacote/node_modules/tuf-js/dist/utils/url.js delete mode 100644 node_modules/pacote/node_modules/tuf-js/package.json rename node_modules/{@npmcli/metavuln-calculator/node_modules/@sigstore/bundle => sigstore}/LICENSE (100%) rename node_modules/{@npmcli/metavuln-calculator/node_modules => }/sigstore/dist/config.js (100%) rename node_modules/{@npmcli/metavuln-calculator/node_modules => }/sigstore/dist/index.js 
(100%) rename node_modules/{@npmcli/metavuln-calculator/node_modules => }/sigstore/dist/sigstore.js (100%) rename node_modules/{@npmcli/metavuln-calculator/node_modules/@sigstore/core => sigstore/node_modules/@sigstore/bundle}/LICENSE (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/bundle/dist/build.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/bundle/dist/bundle.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/bundle/dist/error.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/bundle/dist/index.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/bundle/dist/serialized.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/bundle/dist/utility.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/bundle/dist/validate.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/bundle/package.json (100%) rename node_modules/{@npmcli/metavuln-calculator/node_modules/@sigstore/sign => sigstore/node_modules/@sigstore/core}/LICENSE (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/core/dist/asn1/error.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/core/dist/asn1/index.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/core/dist/asn1/length.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/core/dist/asn1/obj.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/core/dist/asn1/parse.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/core/dist/asn1/tag.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/core/dist/crypto.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/core/dist/dsse.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/core/dist/encoding.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/core/dist/index.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/core/dist/json.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/core/dist/oid.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/core/dist/pem.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/core/dist/rfc3161/error.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/core/dist/rfc3161/index.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/core/dist/rfc3161/timestamp.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/core/dist/stream.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/core/dist/x509/cert.js (100%) rename node_modules/{@npmcli/metavuln-calculator => 
sigstore}/node_modules/@sigstore/core/dist/x509/ext.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/core/dist/x509/index.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/core/dist/x509/sct.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/core/package.json (100%) rename node_modules/{@npmcli/metavuln-calculator/node_modules/@sigstore/tuf => sigstore/node_modules/@sigstore/sign}/LICENSE (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/sign/dist/bundler/base.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/sign/dist/bundler/bundle.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/sign/dist/bundler/dsse.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/sign/dist/bundler/index.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/sign/dist/bundler/message.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/sign/dist/error.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/sign/dist/external/error.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/sign/dist/external/fetch.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/sign/dist/external/fulcio.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/sign/dist/external/rekor.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/sign/dist/external/tsa.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/sign/dist/identity/ci.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/sign/dist/identity/index.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/sign/dist/identity/provider.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/sign/dist/index.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/sign/dist/signer/fulcio/index.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/sign/dist/signer/index.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/sign/dist/signer/signer.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/sign/dist/types/fetch.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/sign/dist/util/index.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/sign/dist/util/oidc.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/sign/dist/util/ua.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/sign/dist/witness/index.js (100%) rename node_modules/{@npmcli/metavuln-calculator 
=> sigstore}/node_modules/@sigstore/sign/dist/witness/tlog/client.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/sign/dist/witness/tlog/entry.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/sign/dist/witness/tlog/index.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/sign/dist/witness/tsa/client.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/sign/dist/witness/tsa/index.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/sign/dist/witness/witness.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/sign/package.json (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/verify/dist/bundle/dsse.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/verify/dist/bundle/index.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/verify/dist/bundle/message.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/verify/dist/error.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/verify/dist/index.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/verify/dist/key/certificate.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/verify/dist/key/index.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/verify/dist/key/sct.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/verify/dist/policy.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/verify/dist/shared.types.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/verify/dist/timestamp/index.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/verify/dist/timestamp/merkle.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/verify/dist/timestamp/set.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/verify/dist/timestamp/tsa.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/verify/dist/tlog/dsse.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/verify/dist/tlog/index.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/verify/dist/tlog/intoto.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/verify/dist/trust/filter.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/verify/dist/trust/index.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/verify/dist/trust/trust.types.js (100%) rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/verify/dist/verifier.js (100%) 
rename node_modules/{@npmcli/metavuln-calculator => sigstore}/node_modules/@sigstore/verify/package.json (100%) rename node_modules/{@npmcli/metavuln-calculator/node_modules => }/sigstore/package.json (100%) delete mode 100644 node_modules/tuf-js/node_modules/@npmcli/agent/lib/agents.js delete mode 100644 node_modules/tuf-js/node_modules/@npmcli/agent/lib/dns.js delete mode 100644 node_modules/tuf-js/node_modules/@npmcli/agent/lib/errors.js delete mode 100644 node_modules/tuf-js/node_modules/@npmcli/agent/lib/index.js delete mode 100644 node_modules/tuf-js/node_modules/@npmcli/agent/lib/options.js delete mode 100644 node_modules/tuf-js/node_modules/@npmcli/agent/lib/proxy.js delete mode 100644 node_modules/tuf-js/node_modules/@npmcli/agent/package.json delete mode 100644 node_modules/tuf-js/node_modules/@npmcli/fs/LICENSE.md delete mode 100644 node_modules/tuf-js/node_modules/@npmcli/fs/lib/common/get-options.js delete mode 100644 node_modules/tuf-js/node_modules/@npmcli/fs/lib/common/node.js delete mode 100644 node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/LICENSE delete mode 100644 node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/errors.js delete mode 100644 node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/index.js delete mode 100644 node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/polyfill.js delete mode 100644 node_modules/tuf-js/node_modules/@npmcli/fs/lib/index.js delete mode 100644 node_modules/tuf-js/node_modules/@npmcli/fs/lib/move-file.js delete mode 100644 node_modules/tuf-js/node_modules/@npmcli/fs/lib/readdir-scoped.js delete mode 100644 node_modules/tuf-js/node_modules/@npmcli/fs/lib/with-temp-dir.js delete mode 100644 node_modules/tuf-js/node_modules/@npmcli/fs/package.json rename node_modules/{@npmcli/metavuln-calculator => tuf-js}/node_modules/@tufjs/models/LICENSE (100%) rename node_modules/{@npmcli/metavuln-calculator => tuf-js}/node_modules/@tufjs/models/dist/base.js (100%) rename node_modules/{@npmcli/metavuln-calculator => tuf-js}/node_modules/@tufjs/models/dist/delegations.js (100%) rename node_modules/{@npmcli/metavuln-calculator => tuf-js}/node_modules/@tufjs/models/dist/error.js (100%) rename node_modules/{@npmcli/metavuln-calculator => tuf-js}/node_modules/@tufjs/models/dist/file.js (100%) rename node_modules/{@npmcli/metavuln-calculator => tuf-js}/node_modules/@tufjs/models/dist/index.js (100%) rename node_modules/{@npmcli/metavuln-calculator => tuf-js}/node_modules/@tufjs/models/dist/key.js (100%) rename node_modules/{@npmcli/metavuln-calculator => tuf-js}/node_modules/@tufjs/models/dist/metadata.js (100%) rename node_modules/{@npmcli/metavuln-calculator => tuf-js}/node_modules/@tufjs/models/dist/role.js (100%) rename node_modules/{@npmcli/metavuln-calculator => tuf-js}/node_modules/@tufjs/models/dist/root.js (100%) rename node_modules/{@npmcli/metavuln-calculator => tuf-js}/node_modules/@tufjs/models/dist/signature.js (100%) rename node_modules/{@npmcli/metavuln-calculator => tuf-js}/node_modules/@tufjs/models/dist/snapshot.js (100%) rename node_modules/{@npmcli/metavuln-calculator => tuf-js}/node_modules/@tufjs/models/dist/targets.js (100%) rename node_modules/{@npmcli/metavuln-calculator => tuf-js}/node_modules/@tufjs/models/dist/timestamp.js (100%) rename node_modules/{@npmcli/metavuln-calculator => tuf-js}/node_modules/@tufjs/models/dist/utils/guard.js (100%) rename node_modules/{@npmcli/metavuln-calculator => tuf-js}/node_modules/@tufjs/models/dist/utils/index.js (100%) rename node_modules/{@npmcli/metavuln-calculator => 
tuf-js}/node_modules/@tufjs/models/dist/utils/key.js (100%) rename node_modules/{@npmcli/metavuln-calculator => tuf-js}/node_modules/@tufjs/models/dist/utils/oid.js (100%) rename node_modules/{@npmcli/metavuln-calculator => tuf-js}/node_modules/@tufjs/models/dist/utils/types.js (100%) rename node_modules/{@npmcli/metavuln-calculator => tuf-js}/node_modules/@tufjs/models/dist/utils/verify.js (100%) rename node_modules/{@npmcli/metavuln-calculator => tuf-js}/node_modules/@tufjs/models/package.json (100%) delete mode 100644 node_modules/tuf-js/node_modules/cacache/LICENSE.md delete mode 100644 node_modules/tuf-js/node_modules/cacache/lib/content/path.js delete mode 100644 node_modules/tuf-js/node_modules/cacache/lib/content/read.js delete mode 100644 node_modules/tuf-js/node_modules/cacache/lib/content/rm.js delete mode 100644 node_modules/tuf-js/node_modules/cacache/lib/content/write.js delete mode 100644 node_modules/tuf-js/node_modules/cacache/lib/entry-index.js delete mode 100644 node_modules/tuf-js/node_modules/cacache/lib/get.js delete mode 100644 node_modules/tuf-js/node_modules/cacache/lib/index.js delete mode 100644 node_modules/tuf-js/node_modules/cacache/lib/memoization.js delete mode 100644 node_modules/tuf-js/node_modules/cacache/lib/put.js delete mode 100644 node_modules/tuf-js/node_modules/cacache/lib/rm.js delete mode 100644 node_modules/tuf-js/node_modules/cacache/lib/util/glob.js delete mode 100644 node_modules/tuf-js/node_modules/cacache/lib/util/hash-to-segments.js delete mode 100644 node_modules/tuf-js/node_modules/cacache/lib/util/tmp.js delete mode 100644 node_modules/tuf-js/node_modules/cacache/lib/verify.js delete mode 100644 node_modules/tuf-js/node_modules/cacache/package.json delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/LICENSE delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/entry.js delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/errors.js delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/index.js delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/key.js delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/policy.js delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/fetch.js delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/index.js delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/options.js delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/pipeline.js delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/remote.js delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/package.json delete mode 100644 node_modules/tuf-js/node_modules/minipass-fetch/LICENSE delete mode 100644 node_modules/tuf-js/node_modules/minipass-fetch/lib/abort-error.js delete mode 100644 node_modules/tuf-js/node_modules/minipass-fetch/lib/blob.js delete mode 100644 node_modules/tuf-js/node_modules/minipass-fetch/lib/body.js delete mode 100644 node_modules/tuf-js/node_modules/minipass-fetch/lib/fetch-error.js delete mode 100644 node_modules/tuf-js/node_modules/minipass-fetch/lib/headers.js delete mode 100644 node_modules/tuf-js/node_modules/minipass-fetch/lib/index.js delete mode 100644 node_modules/tuf-js/node_modules/minipass-fetch/lib/request.js delete mode 100644 node_modules/tuf-js/node_modules/minipass-fetch/lib/response.js delete mode 100644 
node_modules/tuf-js/node_modules/minipass-fetch/package.json delete mode 100644 node_modules/tuf-js/node_modules/proc-log/LICENSE delete mode 100644 node_modules/tuf-js/node_modules/proc-log/lib/index.js delete mode 100644 node_modules/tuf-js/node_modules/proc-log/package.json delete mode 100644 node_modules/tuf-js/node_modules/ssri/LICENSE.md delete mode 100644 node_modules/tuf-js/node_modules/ssri/lib/index.js delete mode 100644 node_modules/tuf-js/node_modules/ssri/package.json delete mode 100644 node_modules/tuf-js/node_modules/unique-filename/LICENSE delete mode 100644 node_modules/tuf-js/node_modules/unique-filename/lib/index.js delete mode 100644 node_modules/tuf-js/node_modules/unique-filename/package.json delete mode 100644 node_modules/tuf-js/node_modules/unique-slug/LICENSE delete mode 100644 node_modules/tuf-js/node_modules/unique-slug/lib/index.js delete mode 100644 node_modules/tuf-js/node_modules/unique-slug/package.json diff --git a/node_modules/.gitignore b/node_modules/.gitignore index 66bb3a00dac78..c6b5ae1809e8d 100644 --- a/node_modules/.gitignore +++ b/node_modules/.gitignore @@ -24,19 +24,7 @@ !/@npmcli/metavuln-calculator !/@npmcli/metavuln-calculator/node_modules/ /@npmcli/metavuln-calculator/node_modules/* -!/@npmcli/metavuln-calculator/node_modules/@sigstore/ -/@npmcli/metavuln-calculator/node_modules/@sigstore/* -!/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle -!/@npmcli/metavuln-calculator/node_modules/@sigstore/core -!/@npmcli/metavuln-calculator/node_modules/@sigstore/sign -!/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf -!/@npmcli/metavuln-calculator/node_modules/@sigstore/verify -!/@npmcli/metavuln-calculator/node_modules/@tufjs/ -/@npmcli/metavuln-calculator/node_modules/@tufjs/* -!/@npmcli/metavuln-calculator/node_modules/@tufjs/models !/@npmcli/metavuln-calculator/node_modules/pacote -!/@npmcli/metavuln-calculator/node_modules/sigstore -!/@npmcli/metavuln-calculator/node_modules/tuf-js !/@npmcli/name-from-folder !/@npmcli/node-gyp !/@npmcli/package-json @@ -54,7 +42,6 @@ !/@tufjs/ /@tufjs/* !/@tufjs/canonical-json -!/@tufjs/models !/abbrev !/agent-base !/aggregate-error @@ -197,20 +184,6 @@ !/p-map !/package-json-from-dist !/pacote -!/pacote/node_modules/ -/pacote/node_modules/* -!/pacote/node_modules/@sigstore/ -/pacote/node_modules/@sigstore/* -!/pacote/node_modules/@sigstore/bundle -!/pacote/node_modules/@sigstore/core -!/pacote/node_modules/@sigstore/sign -!/pacote/node_modules/@sigstore/tuf -!/pacote/node_modules/@sigstore/verify -!/pacote/node_modules/@tufjs/ -/pacote/node_modules/@tufjs/* -!/pacote/node_modules/@tufjs/models -!/pacote/node_modules/sigstore -!/pacote/node_modules/tuf-js !/parse-conflict-json !/path-key !/path-scurry @@ -233,6 +206,15 @@ !/shebang-command !/shebang-regex !/signal-exit +!/sigstore +!/sigstore/node_modules/ +/sigstore/node_modules/* +!/sigstore/node_modules/@sigstore/ +/sigstore/node_modules/@sigstore/* +!/sigstore/node_modules/@sigstore/bundle +!/sigstore/node_modules/@sigstore/core +!/sigstore/node_modules/@sigstore/sign +!/sigstore/node_modules/@sigstore/verify !/smart-buffer !/socks-proxy-agent !/socks @@ -264,17 +246,9 @@ !/tuf-js !/tuf-js/node_modules/ /tuf-js/node_modules/* -!/tuf-js/node_modules/@npmcli/ -/tuf-js/node_modules/@npmcli/* -!/tuf-js/node_modules/@npmcli/agent -!/tuf-js/node_modules/@npmcli/fs -!/tuf-js/node_modules/cacache -!/tuf-js/node_modules/make-fetch-happen -!/tuf-js/node_modules/minipass-fetch -!/tuf-js/node_modules/proc-log -!/tuf-js/node_modules/ssri 
-!/tuf-js/node_modules/unique-filename -!/tuf-js/node_modules/unique-slug +!/tuf-js/node_modules/@tufjs/ +/tuf-js/node_modules/@tufjs/* +!/tuf-js/node_modules/@tufjs/models !/unique-filename !/unique-slug !/util-deprecate diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/appdata.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/appdata.js deleted file mode 100644 index 06a8143e70da2..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/appdata.js +++ /dev/null @@ -1,43 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.appDataPath = appDataPath; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const os_1 = __importDefault(require("os")); -const path_1 = __importDefault(require("path")); -function appDataPath(name) { - const homedir = os_1.default.homedir(); - switch (process.platform) { - /* istanbul ignore next */ - case 'darwin': { - const appSupport = path_1.default.join(homedir, 'Library', 'Application Support'); - return path_1.default.join(appSupport, name); - } - /* istanbul ignore next */ - case 'win32': { - const localAppData = process.env.LOCALAPPDATA || path_1.default.join(homedir, 'AppData', 'Local'); - return path_1.default.join(localAppData, name, 'Data'); - } - /* istanbul ignore next */ - default: { - const localData = process.env.XDG_DATA_HOME || path_1.default.join(homedir, '.local', 'share'); - return path_1.default.join(localData, name); - } - } -} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/client.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/client.js deleted file mode 100644 index 328f49e40dbbd..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/client.js +++ /dev/null @@ -1,111 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.TUFClient = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -const tuf_js_1 = require("tuf-js"); -const _1 = require("."); -const target_1 = require("./target"); -const TARGETS_DIR_NAME = 'targets'; -class TUFClient { - constructor(options) { - const url = new URL(options.mirrorURL); - const repoName = encodeURIComponent(url.host + url.pathname.replace(/\/$/, '')); - const cachePath = path_1.default.join(options.cachePath, repoName); - initTufCache(cachePath); - seedCache({ - cachePath, - mirrorURL: options.mirrorURL, - tufRootPath: options.rootPath, - forceInit: options.forceInit, - }); - this.updater = initClient({ - mirrorURL: options.mirrorURL, - cachePath, - forceCache: options.forceCache, - retry: options.retry, - timeout: options.timeout, - }); - } - async refresh() { - return this.updater.refresh(); - } - getTarget(targetName) { - return (0, target_1.readTarget)(this.updater, targetName); - } -} -exports.TUFClient = TUFClient; -// Initializes the TUF cache directory structure including the initial -// root.json file. If the cache directory does not exist, it will be -// created. If the targets directory does not exist, it will be created. -// If the root.json file does not exist, it will be copied from the -// rootPath argument. -function initTufCache(cachePath) { - const targetsPath = path_1.default.join(cachePath, TARGETS_DIR_NAME); - if (!fs_1.default.existsSync(cachePath)) { - fs_1.default.mkdirSync(cachePath, { recursive: true }); - } - if (!fs_1.default.existsSync(targetsPath)) { - fs_1.default.mkdirSync(targetsPath); - } -} -// Populates the TUF cache with the initial root.json file. If the root.json -// file does not exist (or we're forcing re-initialization), copy it from either -// the rootPath argument or from one of the repo seeds. -function seedCache({ cachePath, mirrorURL, tufRootPath, forceInit, }) { - const cachedRootPath = path_1.default.join(cachePath, 'root.json'); - // If the root.json file does not exist (or we're forcing re-initialization), - // populate it either from the supplied rootPath or from one of the repo seeds. 
- if (!fs_1.default.existsSync(cachedRootPath) || forceInit) { - if (tufRootPath) { - fs_1.default.copyFileSync(tufRootPath, cachedRootPath); - } - else { - const seeds = require('../seeds.json'); - const repoSeed = seeds[mirrorURL]; - if (!repoSeed) { - throw new _1.TUFError({ - code: 'TUF_INIT_CACHE_ERROR', - message: `No root.json found for mirror: ${mirrorURL}`, - }); - } - fs_1.default.writeFileSync(cachedRootPath, Buffer.from(repoSeed['root.json'], 'base64')); - // Copy any seed targets into the cache - Object.entries(repoSeed.targets).forEach(([targetName, target]) => { - fs_1.default.writeFileSync(path_1.default.join(cachePath, TARGETS_DIR_NAME, targetName), Buffer.from(target, 'base64')); - }); - } - } -} -function initClient(options) { - const config = { - fetchTimeout: options.timeout, - fetchRetry: options.retry, - }; - return new tuf_js_1.Updater({ - metadataBaseUrl: options.mirrorURL, - targetBaseUrl: `${options.mirrorURL}/targets`, - metadataDir: options.cachePath, - targetDir: path_1.default.join(options.cachePath, TARGETS_DIR_NAME), - forceCache: options.forceCache, - config, - }); -} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/error.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/error.js deleted file mode 100644 index e13971b289ff2..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/error.js +++ /dev/null @@ -1,12 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.TUFError = void 0; -class TUFError extends Error { - constructor({ code, message, cause, }) { - super(message); - this.code = code; - this.cause = cause; - this.name = this.constructor.name; - } -} -exports.TUFError = TUFError; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/index.js deleted file mode 100644 index 2af5de93ec5d2..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/index.js +++ /dev/null @@ -1,56 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.TUFError = exports.DEFAULT_MIRROR_URL = void 0; -exports.getTrustedRoot = getTrustedRoot; -exports.initTUF = initTUF; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -const protobuf_specs_1 = require("@sigstore/protobuf-specs"); -const appdata_1 = require("./appdata"); -const client_1 = require("./client"); -exports.DEFAULT_MIRROR_URL = 'https://tuf-repo-cdn.sigstore.dev'; -const DEFAULT_CACHE_DIR = 'sigstore-js'; -const DEFAULT_RETRY = { retries: 2 }; -const DEFAULT_TIMEOUT = 5000; -const TRUSTED_ROOT_TARGET = 'trusted_root.json'; -async function getTrustedRoot( -/* istanbul ignore next */ -options = {}) { - const client = createClient(options); - const trustedRoot = await client.getTarget(TRUSTED_ROOT_TARGET); - return protobuf_specs_1.TrustedRoot.fromJSON(JSON.parse(trustedRoot)); -} -async function initTUF( -/* istanbul ignore next */ -options = {}) { - const client = createClient(options); - return client.refresh().then(() => client); -} -// Create a TUF client with default options -function createClient(options) { - /* istanbul ignore next */ - return new client_1.TUFClient({ - cachePath: options.cachePath || (0, appdata_1.appDataPath)(DEFAULT_CACHE_DIR), - rootPath: options.rootPath, - mirrorURL: options.mirrorURL || exports.DEFAULT_MIRROR_URL, - retry: options.retry ?? DEFAULT_RETRY, - timeout: options.timeout ?? DEFAULT_TIMEOUT, - forceCache: options.forceCache ?? false, - forceInit: options.forceInit ?? options.force ?? false, - }); -} -var error_1 = require("./error"); -Object.defineProperty(exports, "TUFError", { enumerable: true, get: function () { return error_1.TUFError; } }); diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/target.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/target.js deleted file mode 100644 index 5c6675bdfbf5f..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/dist/target.js +++ /dev/null @@ -1,79 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.readTarget = readTarget; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const fs_1 = __importDefault(require("fs")); -const error_1 = require("./error"); -// Downloads and returns the specified target from the provided TUF Updater. -async function readTarget(tuf, targetPath) { - const path = await getTargetPath(tuf, targetPath); - return new Promise((resolve, reject) => { - fs_1.default.readFile(path, 'utf-8', (err, data) => { - if (err) { - reject(new error_1.TUFError({ - code: 'TUF_READ_TARGET_ERROR', - message: `error reading target ${path}`, - cause: err, - })); - } - else { - resolve(data); - } - }); - }); -} -// Returns the local path to the specified target. If the target is not yet -// cached locally, the provided TUF Updater will be used to download and -// cache the target. 
-async function getTargetPath(tuf, target) { - let targetInfo; - try { - targetInfo = await tuf.getTargetInfo(target); - } - catch (err) { - throw new error_1.TUFError({ - code: 'TUF_REFRESH_METADATA_ERROR', - message: 'error refreshing TUF metadata', - cause: err, - }); - } - if (!targetInfo) { - throw new error_1.TUFError({ - code: 'TUF_FIND_TARGET_ERROR', - message: `target ${target} not found`, - }); - } - let path = await tuf.findCachedTarget(targetInfo); - // An empty path here means the target has not been cached locally, or is - // out of date. In either case, we need to download it. - if (!path) { - try { - path = await tuf.downloadTarget(targetInfo); - } - catch (err) { - throw new error_1.TUFError({ - code: 'TUF_DOWNLOAD_TARGET_ERROR', - message: `error downloading target ${path}`, - cause: err, - }); - } - } - return path; -} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/package.json deleted file mode 100644 index 808689dfddf92..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/package.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "name": "@sigstore/tuf", - "version": "3.0.0", - "description": "Client for the Sigstore TUF repository", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "scripts": { - "clean": "shx rm -rf dist *.tsbuildinfo", - "build": "tsc --build", - "test": "jest" - }, - "files": [ - "dist", - "seeds.json" - ], - "author": "bdehamer@github.com", - "license": "Apache-2.0", - "repository": { - "type": "git", - "url": "git+https://github.com/sigstore/sigstore-js.git" - }, - "bugs": { - "url": "https://github.com/sigstore/sigstore-js/issues" - }, - "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/tuf#readme", - "publishConfig": { - "provenance": true - }, - "devDependencies": { - "@sigstore/jest": "^0.0.0", - "@tufjs/repo-mock": "^3.0.1", - "@types/make-fetch-happen": "^10.0.4" - }, - "dependencies": { - "@sigstore/protobuf-specs": "^0.3.2", - "tuf-js": "^3.0.1" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } -} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/seeds.json b/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/seeds.json deleted file mode 100644 index d1d3c6b5c4604..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/seeds.json +++ /dev/null @@ -1 +0,0 @@ -{"https://tuf-repo-cdn.sigstore.dev":{"root.json":"{
 "signatures": [
  {
   "keyid": "6f260089d5923daf20166ca657c543af618346ab971884a99962b01988bbe0c3",
   "sig": "30460221008ab1f6f17d4f9e6d7dcf1c88912b6b53cc10388644ae1f09bc37a082cd06003e022100e145ef4c7b782d4e8107b53437e669d0476892ce999903ae33d14448366996e7"
  },
  {
   "keyid": "e71a54d543835ba86adad9460379c7641fb8726d164ea766801a1c522aba7ea2",
   "sig": "3045022100c768b2f86da99569019c160a081da54ae36c34c0a3120d3cb69b53b7d113758e02204f671518f617b20d46537fae6c3b63bae8913f4f1962156105cc4f019ac35c6a"
  },
  {
   "keyid": "22f4caec6d8e6f9555af66b3d4c3cb06a3bb23fdc7e39c916c61f462e6f52b06",
   "sig": "3045022100b4434e6995d368d23e74759acd0cb9013c83a5d3511f0f997ec54c456ae4350a022015b0e265d182d2b61dc74e155d98b3c3fbe564ba05286aa14c8df02c9b756516"
  },
  {
   "keyid": "61643838125b440b40db6942f5cb5a31c0dc04368316eb2aaa58b95904a58222",
   "sig": "304502210082c58411d989eb9f861410857d42381590ec9424dbdaa51e78ed13515431904e0220118185da6a6c2947131c17797e2bb7620ce26e5f301d1ceac5f2a7e58f9dcf2e"
  },
  {
   "keyid": "a687e5bf4fab82b0ee58d46e05c9535145a2c9afb458f43d42b45ca0fdce2a70",
   "sig": "3046022100c78513854cae9c32eaa6b88e18912f48006c2757a258f917312caba75948eb9e022100d9e1b4ce0adfe9fd2e2148d7fa27a2f40ba1122bd69da7612d8d1776b013c91d"
  },
  {
   "keyid": "fdfa83a07b5a83589b87ded41f77f39d232ad91f7cce52868dacd06ba089849f",
   "sig": "3045022056483a2d5d9ea9cec6e11eadfb33c484b614298faca15acf1c431b11ed7f734c022100d0c1d726af92a87e4e66459ca5adf38a05b44e1f94318423f954bae8bca5bb2e"
  },
  {
   "keyid": "e2f59acb9488519407e18cbfc9329510be03c04aca9929d2f0301343fec85523",
   "sig": "3046022100d004de88024c32dc5653a9f4843cfc5215427048ad9600d2cf9c969e6edff3d2022100d9ebb798f5fc66af10899dece014a8628ccf3c5402cd4a4270207472f8f6e712"
  },
  {
   "keyid": "3c344aa068fd4cc4e87dc50b612c02431fbc771e95003993683a2b0bf260cf0e",
   "sig": "3046022100b7b09996c45ca2d4b05603e56baefa29718a0b71147cf8c6e66349baa61477df022100c4da80c717b4fa7bba0fd5c72da8a0499358b01358b2309f41d1456ea1e7e1d9"
  },
  {
   "keyid": "ec81669734e017996c5b85f3d02c3de1dd4637a152019fe1af125d2f9368b95e",
   "sig": "3046022100be9782c30744e411a82fa85b5138d601ce148bc19258aec64e7ec24478f38812022100caef63dcaf1a4b9a500d3bd0e3f164ec18f1b63d7a9460d9acab1066db0f016d"
  },
  {
   "keyid": "1e1d65ce98b10addad4764febf7dda2d0436b3d3a3893579c0dddaea20e54849",
   "sig": "30450220746ec3f8534ce55531d0d01ff64964ef440d1e7d2c4c142409b8e9769f1ada6f022100e3b929fcd93ea18feaa0825887a7210489879a66780c07a83f4bd46e2f09ab3b"
  }
 ],
 "signed": {
  "_type": "root",
  "consistent_snapshot": true,
  "expires": "2025-02-19T08:04:32Z",
  "keys": {
   "22f4caec6d8e6f9555af66b3d4c3cb06a3bb23fdc7e39c916c61f462e6f52b06": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEzBzVOmHCPojMVLSI364WiiV8NPrD\n6IgRxVliskz/v+y3JER5mcVGcONliDcWMC5J2lfHmjPNPhb4H7xm8LzfSA==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@santiagotorres"
   },
   "61643838125b440b40db6942f5cb5a31c0dc04368316eb2aaa58b95904a58222": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEinikSsAQmYkNeH5eYq/CnIzLaacO\nxlSaawQDOwqKy/tCqxq5xxPSJc21K4WIhs9GyOkKfzueY3GILzcMJZ4cWw==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@bobcallaway"
   },
   "6f260089d5923daf20166ca657c543af618346ab971884a99962b01988bbe0c3": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEy8XKsmhBYDI8Jc0GwzBxeKax0cm5\nSTKEU65HPFunUn41sT8pi0FjM4IkHz/YUmwmLUO0Wt7lxhj6BkLIK4qYAw==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@dlorenc"
   },
   "7247f0dbad85b147e1863bade761243cc785dcb7aa410e7105dd3d2b61a36d2c": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEWRiGr5+j+3J5SsH+Ztr5nE2H2wO7\nBV+nO3s93gLca18qTOzHY1oWyAGDykMSsGTUBSt9D+An0KfKsD2mfSM42Q==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-online-uri": "gcpkms://projects/sigstore-root-signing/locations/global/keyRings/root/cryptoKeys/timestamp"
   },
   "a687e5bf4fab82b0ee58d46e05c9535145a2c9afb458f43d42b45ca0fdce2a70": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE0ghrh92Lw1Yr3idGV5WqCtMDB8Cx\n+D8hdC4w2ZLNIplVRoVGLskYa3gheMyOjiJ8kPi15aQ2//7P+oj7UvJPGw==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@joshuagl"
   },
   "e71a54d543835ba86adad9460379c7641fb8726d164ea766801a1c522aba7ea2": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEEXsz3SZXFb8jMV42j6pJlyjbjR8K\nN3Bwocexq6LMIb5qsWKOQvLN16NUefLc4HswOoumRsVVaajSpQS6fobkRw==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@mnm678"
   }
  },
  "roles": {
   "root": {
    "keyids": [
     "6f260089d5923daf20166ca657c543af618346ab971884a99962b01988bbe0c3",
     "e71a54d543835ba86adad9460379c7641fb8726d164ea766801a1c522aba7ea2",
     "22f4caec6d8e6f9555af66b3d4c3cb06a3bb23fdc7e39c916c61f462e6f52b06",
     "61643838125b440b40db6942f5cb5a31c0dc04368316eb2aaa58b95904a58222",
     "a687e5bf4fab82b0ee58d46e05c9535145a2c9afb458f43d42b45ca0fdce2a70"
    ],
    "threshold": 3
   },
   "snapshot": {
    "keyids": [
     "7247f0dbad85b147e1863bade761243cc785dcb7aa410e7105dd3d2b61a36d2c"
    ],
    "threshold": 1,
    "x-tuf-on-ci-expiry-period": 3650,
    "x-tuf-on-ci-signing-period": 365
   },
   "targets": {
    "keyids": [
     "6f260089d5923daf20166ca657c543af618346ab971884a99962b01988bbe0c3",
     "e71a54d543835ba86adad9460379c7641fb8726d164ea766801a1c522aba7ea2",
     "22f4caec6d8e6f9555af66b3d4c3cb06a3bb23fdc7e39c916c61f462e6f52b06",
     "61643838125b440b40db6942f5cb5a31c0dc04368316eb2aaa58b95904a58222",
     "a687e5bf4fab82b0ee58d46e05c9535145a2c9afb458f43d42b45ca0fdce2a70"
    ],
    "threshold": 3
   },
   "timestamp": {
    "keyids": [
     "7247f0dbad85b147e1863bade761243cc785dcb7aa410e7105dd3d2b61a36d2c"
    ],
    "threshold": 1,
    "x-tuf-on-ci-expiry-period": 7,
    "x-tuf-on-ci-signing-period": 4
   }
  },
  "spec_version": "1.0",
  "version": 10,
  "x-tuf-on-ci-expiry-period": 182,
  "x-tuf-on-ci-signing-period": 31
 }
}","targets":{"trusted_root.json":"{
  "mediaType": "application/vnd.dev.sigstore.trustedroot+json;version=0.1",
  "tlogs": [
    {
      "baseUrl": "https://rekor.sigstore.dev",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE2G2Y+2tabdTV5BcGiBIx0a9fAFwrkBbmLSGtks4L3qX6yYY0zufBnhC8Ur/iy55GhWP/9A/bY2LhC30M9+RYtw==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2021-01-12T11:53:27.000Z"
        }
      },
      "logId": {
        "keyId": "wNI9atQGlz+VWfO6LRygH4QUfY/8W4RFwiT5i5WRgB0="
      }
    }
  ],
  "certificateAuthorities": [
    {
      "subject": {
        "organization": "sigstore.dev",
        "commonName": "sigstore"
      },
      "uri": "https://fulcio.sigstore.dev",
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIIB+DCCAX6gAwIBAgITNVkDZoCiofPDsy7dfm6geLbuhzAKBggqhkjOPQQDAzAqMRUwEwYDVQQKEwxzaWdzdG9yZS5kZXYxETAPBgNVBAMTCHNpZ3N0b3JlMB4XDTIxMDMwNzAzMjAyOVoXDTMxMDIyMzAzMjAyOVowKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTB2MBAGByqGSM49AgEGBSuBBAAiA2IABLSyA7Ii5k+pNO8ZEWY0ylemWDowOkNa3kL+GZE5Z5GWehL9/A9bRNA3RbrsZ5i0JcastaRL7Sp5fp/jD5dxqc/UdTVnlvS16an+2Yfswe/QuLolRUCrcOE2+2iA5+tzd6NmMGQwDgYDVR0PAQH/BAQDAgEGMBIGA1UdEwEB/wQIMAYBAf8CAQEwHQYDVR0OBBYEFMjFHQBBmiQpMlEk6w2uSu1KBtPsMB8GA1UdIwQYMBaAFMjFHQBBmiQpMlEk6w2uSu1KBtPsMAoGCCqGSM49BAMDA2gAMGUCMH8liWJfMui6vXXBhjDgY4MwslmN/TJxVe/83WrFomwmNf056y1X48F9c4m3a3ozXAIxAKjRay5/aj/jsKKGIkmQatjI8uupHr/+CxFvaJWmpYqNkLDGRU+9orzh5hI2RrcuaQ=="
          }
        ]
      },
      "validFor": {
        "start": "2021-03-07T03:20:29.000Z",
        "end": "2022-12-31T23:59:59.999Z"
      }
    },
    {
      "subject": {
        "organization": "sigstore.dev",
        "commonName": "sigstore"
      },
      "uri": "https://fulcio.sigstore.dev",
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIICGjCCAaGgAwIBAgIUALnViVfnU0brJasmRkHrn/UnfaQwCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMjA0MTMyMDA2MTVaFw0zMTEwMDUxMzU2NThaMDcxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEeMBwGA1UEAxMVc2lnc3RvcmUtaW50ZXJtZWRpYXRlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE8RVS/ysH+NOvuDZyPIZtilgUF9NlarYpAd9HP1vBBH1U5CV77LSS7s0ZiH4nE7Hv7ptS6LvvR/STk798LVgMzLlJ4HeIfF3tHSaexLcYpSASr1kS0N/RgBJz/9jWCiXno3sweTAOBgNVHQ8BAf8EBAMCAQYwEwYDVR0lBAwwCgYIKwYBBQUHAwMwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQU39Ppz1YkEZb5qNjpKFWixi4YZD8wHwYDVR0jBBgwFoAUWMAeX5FFpWapesyQoZMi0CrFxfowCgYIKoZIzj0EAwMDZwAwZAIwPCsQK4DYiZYDPIaDi5HFKnfxXx6ASSVmERfsynYBiX2X6SJRnZU84/9DZdnFvvxmAjBOt6QpBlc4J/0DxvkTCqpclvziL6BCCPnjdlIB3Pu3BxsPmygUY7Ii2zbdCdliiow="
          },
          {
            "rawBytes": "MIIB9zCCAXygAwIBAgIUALZNAPFdxHPwjeDloDwyYChAO/4wCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMTEwMDcxMzU2NTlaFw0zMTEwMDUxMzU2NThaMCoxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjERMA8GA1UEAxMIc2lnc3RvcmUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAT7XeFT4rb3PQGwS4IajtLk3/OlnpgangaBclYpsYBr5i+4ynB07ceb3LP0OIOZdxexX69c5iVuyJRQ+Hz05yi+UF3uBWAlHpiS5sh0+H2GHE7SXrk1EC5m1Tr19L9gg92jYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRYwB5fkUWlZql6zJChkyLQKsXF+jAfBgNVHSMEGDAWgBRYwB5fkUWlZql6zJChkyLQKsXF+jAKBggqhkjOPQQDAwNpADBmAjEAj1nHeXZp+13NWBNa+EDsDP8G1WWg1tCMWP/WHPqpaVo0jhsweNFZgSs0eE7wYI4qAjEA2WB9ot98sIkoF3vZYdd3/VtWB5b9TNMea7Ix/stJ5TfcLLeABLE4BNJOsQ4vnBHJ"
          }
        ]
      },
      "validFor": {
        "start": "2022-04-13T20:06:15.000Z"
      }
    }
  ],
  "ctlogs": [
    {
      "baseUrl": "https://ctfe.sigstore.dev/test",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEbfwR+RJudXscgRBRpKX1XFDy3PyudDxz/SfnRi1fT8ekpfBd2O1uoz7jr3Z8nKzxA69EUQ+eFCFI3zeubPWU7w==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2021-03-14T00:00:00.000Z",
          "end": "2022-10-31T23:59:59.999Z"
        }
      },
      "logId": {
        "keyId": "CGCS8ChS/2hF0dFrJ4ScRWcYrBY9wzjSbea8IgY2b3I="
      }
    },
    {
      "baseUrl": "https://ctfe.sigstore.dev/2022",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEiPSlFi0CmFTfEjCUqF9HuCEcYXNKAaYalIJmBZ8yyezPjTqhxrKBpMnaocVtLJBI1eM3uXnQzQGAJdJ4gs9Fyw==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2022-10-20T00:00:00.000Z"
        }
      },
      "logId": {
        "keyId": "3T0wasbHETJjGR4cmWc3AqJKXrjePK3/h4pygC8p7o4="
      }
    }
  ],
  "timestampAuthorities": [
    {
      "subject": {
        "organization": "GitHub, Inc.",
        "commonName": "Internal Services Root"
      },
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIIB3DCCAWKgAwIBAgIUchkNsH36Xa04b1LqIc+qr9DVecMwCgYIKoZIzj0EAwMwMjEVMBMGA1UEChMMR2l0SHViLCBJbmMuMRkwFwYDVQQDExBUU0EgaW50ZXJtZWRpYXRlMB4XDTIzMDQxNDAwMDAwMFoXDTI0MDQxMzAwMDAwMFowMjEVMBMGA1UEChMMR2l0SHViLCBJbmMuMRkwFwYDVQQDExBUU0EgVGltZXN0YW1waW5nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEUD5ZNbSqYMd6r8qpOOEX9ibGnZT9GsuXOhr/f8U9FJugBGExKYp40OULS0erjZW7xV9xV52NnJf5OeDq4e5ZKqNWMFQwDgYDVR0PAQH/BAQDAgeAMBMGA1UdJQQMMAoGCCsGAQUFBwMIMAwGA1UdEwEB/wQCMAAwHwYDVR0jBBgwFoAUaW1RudOgVt0leqY0WKYbuPr47wAwCgYIKoZIzj0EAwMDaAAwZQIwbUH9HvD4ejCZJOWQnqAlkqURllvu9M8+VqLbiRK+zSfZCZwsiljRn8MQQRSkXEE5AjEAg+VxqtojfVfu8DhzzhCx9GKETbJHb19iV72mMKUbDAFmzZ6bQ8b54Zb8tidy5aWe"
          },
          {
            "rawBytes": "MIICEDCCAZWgAwIBAgIUX8ZO5QXP7vN4dMQ5e9sU3nub8OgwCgYIKoZIzj0EAwMwODEVMBMGA1UEChMMR2l0SHViLCBJbmMuMR8wHQYDVQQDExZJbnRlcm5hbCBTZXJ2aWNlcyBSb290MB4XDTIzMDQxNDAwMDAwMFoXDTI4MDQxMjAwMDAwMFowMjEVMBMGA1UEChMMR2l0SHViLCBJbmMuMRkwFwYDVQQDExBUU0EgaW50ZXJtZWRpYXRlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEvMLY/dTVbvIJYANAuszEwJnQE1llftynyMKIMhh48HmqbVr5ygybzsLRLVKbBWOdZ21aeJz+gZiytZetqcyF9WlER5NEMf6JV7ZNojQpxHq4RHGoGSceQv/qvTiZxEDKo2YwZDAOBgNVHQ8BAf8EBAMCAQYwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQUaW1RudOgVt0leqY0WKYbuPr47wAwHwYDVR0jBBgwFoAU9NYYlobnAG4c0/qjxyH/lq/wz+QwCgYIKoZIzj0EAwMDaQAwZgIxAK1B185ygCrIYFlIs3GjswjnwSMG6LY8woLVdakKDZxVa8f8cqMs1DhcxJ0+09w95QIxAO+tBzZk7vjUJ9iJgD4R6ZWTxQWKqNm74jO99o+o9sv4FI/SZTZTFyMn0IJEHdNmyA=="
          },
          {
            "rawBytes": "MIIB9DCCAXqgAwIBAgIUa/JAkdUjK4JUwsqtaiRJGWhqLSowCgYIKoZIzj0EAwMwODEVMBMGA1UEChMMR2l0SHViLCBJbmMuMR8wHQYDVQQDExZJbnRlcm5hbCBTZXJ2aWNlcyBSb290MB4XDTIzMDQxNDAwMDAwMFoXDTMzMDQxMTAwMDAwMFowODEVMBMGA1UEChMMR2l0SHViLCBJbmMuMR8wHQYDVQQDExZJbnRlcm5hbCBTZXJ2aWNlcyBSb290MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEf9jFAXxz4kx68AHRMOkFBhflDcMTvzaXz4x/FCcXjJ/1qEKon/qPIGnaURskDtyNbNDOpeJTDDFqt48iMPrnzpx6IZwqemfUJN4xBEZfza+pYt/iyod+9tZr20RRWSv/o0UwQzAOBgNVHQ8BAf8EBAMCAQYwEgYDVR0TAQH/BAgwBgEB/wIBAjAdBgNVHQ4EFgQU9NYYlobnAG4c0/qjxyH/lq/wz+QwCgYIKoZIzj0EAwMDaAAwZQIxALZLZ8BgRXzKxLMMN9VIlO+e4hrBnNBgF7tz7Hnrowv2NetZErIACKFymBlvWDvtMAIwZO+ki6ssQ1bsZo98O8mEAf2NZ7iiCgDDU0Vwjeco6zyeh0zBTs9/7gV6AHNQ53xD"
          }
        ]
      },
      "validFor": {
        "start": "2023-04-14T00:00:00.000Z"
      }
    }
  ]
}
","registry.npmjs.org%2Fkeys.json":"ewogICAgImtleXMiOiBbCiAgICAgICAgewogICAgICAgICAgICAia2V5SWQiOiAiU0hBMjU2OmpsM2J3c3d1ODBQampva0NnaDBvMnc1YzJVNExoUUFFNTdnajljejFrekEiLAogICAgICAgICAgICAia2V5VXNhZ2UiOiAibnBtOnNpZ25hdHVyZXMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRTFPbGIzek1BRkZ4WEtIaUlrUU81Y0ozWWhsNWk2VVBwK0lodXRlQkpidUhjQTVVb2dLbzBFV3RsV3dXNktTYUtvVE5FWUw3SmxDUWlWbmtoQmt0VWdnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIxOTk5LTAxLTAxVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgIH0KICAgICAgICB9LAogICAgICAgIHsKICAgICAgICAgICAgImtleUlkIjogIlNIQTI1NjpqbDNid3N3dTgwUGpqb2tDZ2gwbzJ3NWMyVTRMaFFBRTU3Z2o5Y3oxa3pBIiwKICAgICAgICAgICAgImtleVVzYWdlIjogIm5wbTphdHRlc3RhdGlvbnMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRTFPbGIzek1BRkZ4WEtIaUlrUU81Y0ozWWhsNWk2VVBwK0lodXRlQkpidUhjQTVVb2dLbzBFV3RsV3dXNktTYUtvVE5FWUw3SmxDUWlWbmtoQmt0VWdnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIyMDIyLTEyLTAxVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgIH0KICAgICAgICB9CiAgICBdCn0K"}}} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/sigstore/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/sigstore/LICENSE deleted file mode 100644 index e9e7c1679a09d..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/sigstore/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). 
- - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2023 The Sigstore Authors - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/LICENSE deleted file mode 100644 index 420700f5d3765..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022 GitHub and the TUF Contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/config.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/config.js deleted file mode 100644 index c66d76af86b98..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/config.js +++ /dev/null @@ -1,15 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.defaultConfig = void 0; -exports.defaultConfig = { - maxRootRotations: 256, - maxDelegations: 32, - rootMaxLength: 512000, //bytes - timestampMaxLength: 16384, // bytes - snapshotMaxLength: 2000000, // bytes - targetsMaxLength: 5000000, // bytes - prefixTargetsWithHash: true, - fetchTimeout: 100000, // milliseconds - fetchRetries: undefined, - fetchRetry: 2, -}; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/error.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/error.js deleted file mode 100644 index f4b10fa202895..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/error.js +++ /dev/null @@ -1,48 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.DownloadHTTPError = exports.DownloadLengthMismatchError = exports.DownloadError = exports.ExpiredMetadataError = exports.EqualVersionError = exports.BadVersionError = exports.RepositoryError = exports.PersistError = exports.RuntimeError = exports.ValueError = void 0; -// An error about insufficient values -class ValueError extends Error { -} -exports.ValueError = ValueError; -class RuntimeError extends Error { -} -exports.RuntimeError = RuntimeError; -class PersistError extends Error { -} -exports.PersistError = PersistError; -// An error with a repository's state, such as a missing file. -// It covers all exceptions that come from the repository side when -// looking from the perspective of users of metadata API or ngclient. -class RepositoryError extends Error { -} -exports.RepositoryError = RepositoryError; -// An error for metadata that contains an invalid version number. -class BadVersionError extends RepositoryError { -} -exports.BadVersionError = BadVersionError; -// An error for metadata containing a previously verified version number. -class EqualVersionError extends BadVersionError { -} -exports.EqualVersionError = EqualVersionError; -// Indicate that a TUF Metadata file has expired. -class ExpiredMetadataError extends RepositoryError { -} -exports.ExpiredMetadataError = ExpiredMetadataError; -//----- Download Errors ------------------------------------------------------- -// An error occurred while attempting to download a file. -class DownloadError extends Error { -} -exports.DownloadError = DownloadError; -// Indicate that a mismatch of lengths was seen while downloading a file -class DownloadLengthMismatchError extends DownloadError { -} -exports.DownloadLengthMismatchError = DownloadLengthMismatchError; -// Returned by FetcherInterface implementations for HTTP errors. 
-class DownloadHTTPError extends DownloadError { - constructor(message, statusCode) { - super(message); - this.statusCode = statusCode; - } -} -exports.DownloadHTTPError = DownloadHTTPError; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/fetcher.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/fetcher.js deleted file mode 100644 index f966ce1bb0cdc..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/fetcher.js +++ /dev/null @@ -1,84 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.DefaultFetcher = exports.BaseFetcher = void 0; -const debug_1 = __importDefault(require("debug")); -const fs_1 = __importDefault(require("fs")); -const make_fetch_happen_1 = __importDefault(require("make-fetch-happen")); -const util_1 = __importDefault(require("util")); -const error_1 = require("./error"); -const tmpfile_1 = require("./utils/tmpfile"); -const log = (0, debug_1.default)('tuf:fetch'); -class BaseFetcher { - // Download file from given URL. The file is downloaded to a temporary - // location and then passed to the given handler. The handler is responsible - // for moving the file to its final location. The temporary file is deleted - // after the handler returns. - async downloadFile(url, maxLength, handler) { - return (0, tmpfile_1.withTempFile)(async (tmpFile) => { - const reader = await this.fetch(url); - let numberOfBytesReceived = 0; - const fileStream = fs_1.default.createWriteStream(tmpFile); - // Read the stream a chunk at a time so that we can check - // the length of the file as we go - try { - for await (const chunk of reader) { - const bufferChunk = Buffer.from(chunk); - numberOfBytesReceived += bufferChunk.length; - if (numberOfBytesReceived > maxLength) { - throw new error_1.DownloadLengthMismatchError('Max length reached'); - } - await writeBufferToStream(fileStream, bufferChunk); - } - } - finally { - // Make sure we always close the stream - await util_1.default.promisify(fileStream.close).bind(fileStream)(); - } - return handler(tmpFile); - }); - } - // Download bytes from given URL. 
- async downloadBytes(url, maxLength) { - return this.downloadFile(url, maxLength, async (file) => { - const stream = fs_1.default.createReadStream(file); - const chunks = []; - for await (const chunk of stream) { - chunks.push(chunk); - } - return Buffer.concat(chunks); - }); - } -} -exports.BaseFetcher = BaseFetcher; -class DefaultFetcher extends BaseFetcher { - constructor(options = {}) { - super(); - this.timeout = options.timeout; - this.retry = options.retry; - } - async fetch(url) { - log('GET %s', url); - const response = await (0, make_fetch_happen_1.default)(url, { - timeout: this.timeout, - retry: this.retry, - }); - if (!response.ok || !response?.body) { - throw new error_1.DownloadHTTPError('Failed to download', response.status); - } - return response.body; - } -} -exports.DefaultFetcher = DefaultFetcher; -const writeBufferToStream = async (stream, buffer) => { - return new Promise((resolve, reject) => { - stream.write(buffer, (err) => { - if (err) { - reject(err); - } - resolve(true); - }); - }); -}; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/index.js deleted file mode 100644 index 5a83b91f355d8..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/index.js +++ /dev/null @@ -1,9 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Updater = exports.BaseFetcher = exports.TargetFile = void 0; -var models_1 = require("@tufjs/models"); -Object.defineProperty(exports, "TargetFile", { enumerable: true, get: function () { return models_1.TargetFile; } }); -var fetcher_1 = require("./fetcher"); -Object.defineProperty(exports, "BaseFetcher", { enumerable: true, get: function () { return fetcher_1.BaseFetcher; } }); -var updater_1 = require("./updater"); -Object.defineProperty(exports, "Updater", { enumerable: true, get: function () { return updater_1.Updater; } }); diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/store.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/store.js deleted file mode 100644 index 8567336108709..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/store.js +++ /dev/null @@ -1,208 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.TrustedMetadataStore = void 0; -const models_1 = require("@tufjs/models"); -const error_1 = require("./error"); -class TrustedMetadataStore { - constructor(rootData) { - this.trustedSet = {}; - // Client workflow 5.1: record fixed update start time - this.referenceTime = new Date(); - // Client workflow 5.2: load trusted root metadata - this.loadTrustedRoot(rootData); - } - get root() { - if (!this.trustedSet.root) { - throw new ReferenceError('No trusted root metadata'); - } - return this.trustedSet.root; - } - get timestamp() { - return this.trustedSet.timestamp; - } - get snapshot() { - return this.trustedSet.snapshot; - } - get targets() { - return this.trustedSet.targets; - } - getRole(name) { - return this.trustedSet[name]; - } - updateRoot(bytesBuffer) { - const data = JSON.parse(bytesBuffer.toString('utf8')); - const newRoot = models_1.Metadata.fromJSON(models_1.MetadataKind.Root, data); - if (newRoot.signed.type != models_1.MetadataKind.Root) { - throw new error_1.RepositoryError(`Expected 'root', got ${newRoot.signed.type}`); - } - // Client workflow 5.4: check for arbitrary software attack - 
this.root.verifyDelegate(models_1.MetadataKind.Root, newRoot); - // Client workflow 5.5: check for rollback attack - if (newRoot.signed.version != this.root.signed.version + 1) { - throw new error_1.BadVersionError(`Expected version ${this.root.signed.version + 1}, got ${newRoot.signed.version}`); - } - // Check that new root is signed by self - newRoot.verifyDelegate(models_1.MetadataKind.Root, newRoot); - // Client workflow 5.7: set new root as trusted root - this.trustedSet.root = newRoot; - return newRoot; - } - updateTimestamp(bytesBuffer) { - if (this.snapshot) { - throw new error_1.RuntimeError('Cannot update timestamp after snapshot'); - } - if (this.root.signed.isExpired(this.referenceTime)) { - throw new error_1.ExpiredMetadataError('Final root.json is expired'); - } - const data = JSON.parse(bytesBuffer.toString('utf8')); - const newTimestamp = models_1.Metadata.fromJSON(models_1.MetadataKind.Timestamp, data); - if (newTimestamp.signed.type != models_1.MetadataKind.Timestamp) { - throw new error_1.RepositoryError(`Expected 'timestamp', got ${newTimestamp.signed.type}`); - } - // Client workflow 5.4.2: check for arbitrary software attack - this.root.verifyDelegate(models_1.MetadataKind.Timestamp, newTimestamp); - if (this.timestamp) { - // Prevent rolling back timestamp version - // Client workflow 5.4.3.1: check for rollback attack - if (newTimestamp.signed.version < this.timestamp.signed.version) { - throw new error_1.BadVersionError(`New timestamp version ${newTimestamp.signed.version} is less than current version ${this.timestamp.signed.version}`); - } - // Keep using old timestamp if versions are equal. - if (newTimestamp.signed.version === this.timestamp.signed.version) { - throw new error_1.EqualVersionError(`New timestamp version ${newTimestamp.signed.version} is equal to current version ${this.timestamp.signed.version}`); - } - // Prevent rolling back snapshot version - // Client workflow 5.4.3.2: check for rollback attack - const snapshotMeta = this.timestamp.signed.snapshotMeta; - const newSnapshotMeta = newTimestamp.signed.snapshotMeta; - if (newSnapshotMeta.version < snapshotMeta.version) { - throw new error_1.BadVersionError(`New snapshot version ${newSnapshotMeta.version} is less than current version ${snapshotMeta.version}`); - } - } - // expiry not checked to allow old timestamp to be used for rollback - // protection of new timestamp: expiry is checked in update_snapshot - this.trustedSet.timestamp = newTimestamp; - // Client workflow 5.4.4: check for freeze attack - this.checkFinalTimestamp(); - return newTimestamp; - } - updateSnapshot(bytesBuffer, trusted = false) { - if (!this.timestamp) { - throw new error_1.RuntimeError('Cannot update snapshot before timestamp'); - } - if (this.targets) { - throw new error_1.RuntimeError('Cannot update snapshot after targets'); - } - // Snapshot cannot be loaded if final timestamp is expired - this.checkFinalTimestamp(); - const snapshotMeta = this.timestamp.signed.snapshotMeta; - // Verify non-trusted data against the hashes in timestamp, if any. - // Trusted snapshot data has already been verified once. 
- // Client workflow 5.5.2: check against timestamp role's snaphsot hash - if (!trusted) { - snapshotMeta.verify(bytesBuffer); - } - const data = JSON.parse(bytesBuffer.toString('utf8')); - const newSnapshot = models_1.Metadata.fromJSON(models_1.MetadataKind.Snapshot, data); - if (newSnapshot.signed.type != models_1.MetadataKind.Snapshot) { - throw new error_1.RepositoryError(`Expected 'snapshot', got ${newSnapshot.signed.type}`); - } - // Client workflow 5.5.3: check for arbitrary software attack - this.root.verifyDelegate(models_1.MetadataKind.Snapshot, newSnapshot); - // version check against meta version (5.5.4) is deferred to allow old - // snapshot to be used in rollback protection - // Client workflow 5.5.5: check for rollback attack - if (this.snapshot) { - Object.entries(this.snapshot.signed.meta).forEach(([fileName, fileInfo]) => { - const newFileInfo = newSnapshot.signed.meta[fileName]; - if (!newFileInfo) { - throw new error_1.RepositoryError(`Missing file ${fileName} in new snapshot`); - } - if (newFileInfo.version < fileInfo.version) { - throw new error_1.BadVersionError(`New version ${newFileInfo.version} of ${fileName} is less than current version ${fileInfo.version}`); - } - }); - } - this.trustedSet.snapshot = newSnapshot; - // snapshot is loaded, but we raise if it's not valid _final_ snapshot - // Client workflow 5.5.4 & 5.5.6 - this.checkFinalSnapsnot(); - return newSnapshot; - } - updateDelegatedTargets(bytesBuffer, roleName, delegatorName) { - if (!this.snapshot) { - throw new error_1.RuntimeError('Cannot update delegated targets before snapshot'); - } - // Targets cannot be loaded if final snapshot is expired or its version - // does not match meta version in timestamp. - this.checkFinalSnapsnot(); - const delegator = this.trustedSet[delegatorName]; - if (!delegator) { - throw new error_1.RuntimeError(`No trusted ${delegatorName} metadata`); - } - // Extract metadata for the delegated role from snapshot - const meta = this.snapshot.signed.meta?.[`${roleName}.json`]; - if (!meta) { - throw new error_1.RepositoryError(`Missing ${roleName}.json in snapshot`); - } - // Client workflow 5.6.2: check against snapshot role's targets hash - meta.verify(bytesBuffer); - const data = JSON.parse(bytesBuffer.toString('utf8')); - const newDelegate = models_1.Metadata.fromJSON(models_1.MetadataKind.Targets, data); - if (newDelegate.signed.type != models_1.MetadataKind.Targets) { - throw new error_1.RepositoryError(`Expected 'targets', got ${newDelegate.signed.type}`); - } - // Client workflow 5.6.3: check for arbitrary software attack - delegator.verifyDelegate(roleName, newDelegate); - // Client workflow 5.6.4: Check against snapshot role’s targets version - const version = newDelegate.signed.version; - if (version != meta.version) { - throw new error_1.BadVersionError(`Version ${version} of ${roleName} does not match snapshot version ${meta.version}`); - } - // Client workflow 5.6.5: check for a freeze attack - if (newDelegate.signed.isExpired(this.referenceTime)) { - throw new error_1.ExpiredMetadataError(`${roleName}.json is expired`); - } - this.trustedSet[roleName] = newDelegate; - } - // Verifies and loads data as trusted root metadata. - // Note that an expired initial root is still considered valid. 
- loadTrustedRoot(bytesBuffer) { - const data = JSON.parse(bytesBuffer.toString('utf8')); - const root = models_1.Metadata.fromJSON(models_1.MetadataKind.Root, data); - if (root.signed.type != models_1.MetadataKind.Root) { - throw new error_1.RepositoryError(`Expected 'root', got ${root.signed.type}`); - } - root.verifyDelegate(models_1.MetadataKind.Root, root); - this.trustedSet['root'] = root; - } - checkFinalTimestamp() { - // Timestamp MUST be loaded - if (!this.timestamp) { - throw new ReferenceError('No trusted timestamp metadata'); - } - // Client workflow 5.4.4: check for freeze attack - if (this.timestamp.signed.isExpired(this.referenceTime)) { - throw new error_1.ExpiredMetadataError('Final timestamp.json is expired'); - } - } - checkFinalSnapsnot() { - // Snapshot and timestamp MUST be loaded - if (!this.snapshot) { - throw new ReferenceError('No trusted snapshot metadata'); - } - if (!this.timestamp) { - throw new ReferenceError('No trusted timestamp metadata'); - } - // Client workflow 5.5.6: check for freeze attack - if (this.snapshot.signed.isExpired(this.referenceTime)) { - throw new error_1.ExpiredMetadataError('snapshot.json is expired'); - } - // Client workflow 5.5.4: check against timestamp role’s snapshot version - const snapshotMeta = this.timestamp.signed.snapshotMeta; - if (this.snapshot.signed.version !== snapshotMeta.version) { - throw new error_1.BadVersionError("Snapshot version doesn't match timestamp"); - } - } -} -exports.TrustedMetadataStore = TrustedMetadataStore; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/updater.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/updater.js deleted file mode 100644 index 8d5eb4428f044..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/updater.js +++ /dev/null @@ -1,350 +0,0 @@ -"use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Updater = void 0; -const models_1 = require("@tufjs/models"); -const debug_1 = __importDefault(require("debug")); -const fs = __importStar(require("fs")); -const path = __importStar(require("path")); -const config_1 = require("./config"); -const error_1 = require("./error"); -const fetcher_1 = require("./fetcher"); -const store_1 = require("./store"); -const url = __importStar(require("./utils/url")); -const log = (0, debug_1.default)('tuf:cache'); -class Updater { - constructor(options) { - const { metadataDir, metadataBaseUrl, targetDir, targetBaseUrl, fetcher, config, } = options; - this.dir = metadataDir; - this.metadataBaseUrl = metadataBaseUrl; - this.targetDir = targetDir; - this.targetBaseUrl = targetBaseUrl; - this.forceCache = options.forceCache ?? false; - const data = this.loadLocalMetadata(models_1.MetadataKind.Root); - this.trustedSet = new store_1.TrustedMetadataStore(data); - this.config = { ...config_1.defaultConfig, ...config }; - this.fetcher = - fetcher || - new fetcher_1.DefaultFetcher({ - timeout: this.config.fetchTimeout, - retry: this.config.fetchRetries ?? this.config.fetchRetry, - }); - } - // refresh and load the metadata before downloading the target - // refresh should be called once after the client is initialized - async refresh() { - // If forceCache is true, try to load the timestamp from local storage - // without fetching it from the remote. Otherwise, load the root and - // timestamp from the remote per the TUF spec. - if (this.forceCache) { - // If anything fails, load the root and timestamp from the remote. This - // should cover any situation where the local metadata is corrupted or - // expired. - try { - await this.loadTimestamp({ checkRemote: false }); - } - catch (error) { - await this.loadRoot(); - await this.loadTimestamp(); - } - } - else { - await this.loadRoot(); - await this.loadTimestamp(); - } - await this.loadSnapshot(); - await this.loadTargets(models_1.MetadataKind.Targets, models_1.MetadataKind.Root); - } - // Returns the TargetFile instance with information for the given target path. - // - // Implicitly calls refresh if it hasn't already been called. - async getTargetInfo(targetPath) { - if (!this.trustedSet.targets) { - await this.refresh(); - } - return this.preorderDepthFirstWalk(targetPath); - } - async downloadTarget(targetInfo, filePath, targetBaseUrl) { - const targetPath = filePath || this.generateTargetPath(targetInfo); - if (!targetBaseUrl) { - if (!this.targetBaseUrl) { - throw new error_1.ValueError('Target base URL not set'); - } - targetBaseUrl = this.targetBaseUrl; - } - let targetFilePath = targetInfo.path; - const consistentSnapshot = this.trustedSet.root.signed.consistentSnapshot; - if (consistentSnapshot && this.config.prefixTargetsWithHash) { - const hashes = Object.values(targetInfo.hashes); - const { dir, base } = path.parse(targetFilePath); - const filename = `${hashes[0]}.${base}`; - targetFilePath = dir ? 
`${dir}/${filename}` : filename; - } - const targetUrl = url.join(targetBaseUrl, targetFilePath); - // Client workflow 5.7.3: download target file - await this.fetcher.downloadFile(targetUrl, targetInfo.length, async (fileName) => { - // Verify hashes and length of downloaded file - await targetInfo.verify(fs.createReadStream(fileName)); - // Copy file to target path - log('WRITE %s', targetPath); - fs.copyFileSync(fileName, targetPath); - }); - return targetPath; - } - async findCachedTarget(targetInfo, filePath) { - if (!filePath) { - filePath = this.generateTargetPath(targetInfo); - } - try { - if (fs.existsSync(filePath)) { - await targetInfo.verify(fs.createReadStream(filePath)); - return filePath; - } - } - catch (error) { - return; // File not found - } - return; // File not found - } - loadLocalMetadata(fileName) { - const filePath = path.join(this.dir, `${fileName}.json`); - log('READ %s', filePath); - return fs.readFileSync(filePath); - } - // Sequentially load and persist on local disk every newer root metadata - // version available on the remote. - // Client workflow 5.3: update root role - async loadRoot() { - // Client workflow 5.3.2: version of trusted root metadata file - const rootVersion = this.trustedSet.root.signed.version; - const lowerBound = rootVersion + 1; - const upperBound = lowerBound + this.config.maxRootRotations; - for (let version = lowerBound; version < upperBound; version++) { - const rootUrl = url.join(this.metadataBaseUrl, `${version}.root.json`); - try { - // Client workflow 5.3.3: download new root metadata file - const bytesData = await this.fetcher.downloadBytes(rootUrl, this.config.rootMaxLength); - // Client workflow 5.3.4 - 5.4.7 - this.trustedSet.updateRoot(bytesData); - // Client workflow 5.3.8: persist root metadata file - this.persistMetadata(models_1.MetadataKind.Root, bytesData); - } - catch (error) { - if (error instanceof error_1.DownloadHTTPError) { - // 404/403 means current root is newest available - if ([403, 404].includes(error.statusCode)) { - break; - } - } - throw error; - } - } - } - // Load local and remote timestamp metadata. - // Client workflow 5.4: update timestamp role - async loadTimestamp({ checkRemote } = { checkRemote: true }) { - // Load local and remote timestamp metadata - try { - const data = this.loadLocalMetadata(models_1.MetadataKind.Timestamp); - this.trustedSet.updateTimestamp(data); - // If checkRemote is disabled, return here to avoid fetching the remote - // timestamp metadata. - if (!checkRemote) { - return; - } - } - catch (error) { - // continue - } - //Load from remote (whether local load succeeded or not) - const timestampUrl = url.join(this.metadataBaseUrl, 'timestamp.json'); - // Client workflow 5.4.1: download timestamp metadata file - const bytesData = await this.fetcher.downloadBytes(timestampUrl, this.config.timestampMaxLength); - try { - // Client workflow 5.4.2 - 5.4.4 - this.trustedSet.updateTimestamp(bytesData); - } - catch (error) { - // If new timestamp version is same as current, discardd the new one. - // This is normal and should NOT raise an error. - if (error instanceof error_1.EqualVersionError) { - return; - } - // Re-raise any other error - throw error; - } - // Client workflow 5.4.5: persist timestamp metadata - this.persistMetadata(models_1.MetadataKind.Timestamp, bytesData); - } - // Load local and remote snapshot metadata. 
- // Client workflow 5.5: update snapshot role - async loadSnapshot() { - //Load local (and if needed remote) snapshot metadata - try { - const data = this.loadLocalMetadata(models_1.MetadataKind.Snapshot); - this.trustedSet.updateSnapshot(data, true); - } - catch (error) { - if (!this.trustedSet.timestamp) { - throw new ReferenceError('No timestamp metadata'); - } - const snapshotMeta = this.trustedSet.timestamp.signed.snapshotMeta; - const maxLength = snapshotMeta.length || this.config.snapshotMaxLength; - const version = this.trustedSet.root.signed.consistentSnapshot - ? snapshotMeta.version - : undefined; - const snapshotUrl = url.join(this.metadataBaseUrl, version ? `${version}.snapshot.json` : 'snapshot.json'); - try { - // Client workflow 5.5.1: download snapshot metadata file - const bytesData = await this.fetcher.downloadBytes(snapshotUrl, maxLength); - // Client workflow 5.5.2 - 5.5.6 - this.trustedSet.updateSnapshot(bytesData); - // Client workflow 5.5.7: persist snapshot metadata file - this.persistMetadata(models_1.MetadataKind.Snapshot, bytesData); - } - catch (error) { - throw new error_1.RuntimeError(`Unable to load snapshot metadata error ${error}`); - } - } - } - // Load local and remote targets metadata. - // Client workflow 5.6: update targets role - async loadTargets(role, parentRole) { - if (this.trustedSet.getRole(role)) { - return this.trustedSet.getRole(role); - } - try { - const buffer = this.loadLocalMetadata(role); - this.trustedSet.updateDelegatedTargets(buffer, role, parentRole); - } - catch (error) { - // Local 'role' does not exist or is invalid: update from remote - if (!this.trustedSet.snapshot) { - throw new ReferenceError('No snapshot metadata'); - } - const metaInfo = this.trustedSet.snapshot.signed.meta[`${role}.json`]; - // TODO: use length for fetching - const maxLength = metaInfo.length || this.config.targetsMaxLength; - const version = this.trustedSet.root.signed.consistentSnapshot - ? metaInfo.version - : undefined; - const encodedRole = encodeURIComponent(role); - const metadataUrl = url.join(this.metadataBaseUrl, version ? `${version}.${encodedRole}.json` : `${encodedRole}.json`); - try { - // Client workflow 5.6.1: download targets metadata file - const bytesData = await this.fetcher.downloadBytes(metadataUrl, maxLength); - // Client workflow 5.6.2 - 5.6.6 - this.trustedSet.updateDelegatedTargets(bytesData, role, parentRole); - // Client workflow 5.6.7: persist targets metadata file - this.persistMetadata(role, bytesData); - } - catch (error) { - throw new error_1.RuntimeError(`Unable to load targets error ${error}`); - } - } - return this.trustedSet.getRole(role); - } - async preorderDepthFirstWalk(targetPath) { - // Interrogates the tree of target delegations in order of appearance - // (which implicitly order trustworthiness), and returns the matching - // target found in the most trusted role. - // List of delegations to be interrogated. A (role, parent role) pair - // is needed to load and verify the delegated targets metadata. - const delegationsToVisit = [ - { - roleName: models_1.MetadataKind.Targets, - parentRoleName: models_1.MetadataKind.Root, - }, - ]; - const visitedRoleNames = new Set(); - // Client workflow 5.6.7: preorder depth-first traversal of the graph of - // target delegations - while (visitedRoleNames.size <= this.config.maxDelegations && - delegationsToVisit.length > 0) { - // Pop the role name from the top of the stack. 
- const { roleName, parentRoleName } = delegationsToVisit.pop(); - // Skip any visited current role to prevent cycles. - // Client workflow 5.6.7.1: skip already-visited roles - if (visitedRoleNames.has(roleName)) { - continue; - } - // The metadata for 'role_name' must be downloaded/updated before - // its targets, delegations, and child roles can be inspected. - const targets = (await this.loadTargets(roleName, parentRoleName)) - ?.signed; - if (!targets) { - continue; - } - const target = targets.targets?.[targetPath]; - if (target) { - return target; - } - // After preorder check, add current role to set of visited roles. - visitedRoleNames.add(roleName); - if (targets.delegations) { - const childRolesToVisit = []; - // NOTE: This may be a slow operation if there are many delegated roles. - const rolesForTarget = targets.delegations.rolesForTarget(targetPath); - for (const { role: childName, terminating } of rolesForTarget) { - childRolesToVisit.push({ - roleName: childName, - parentRoleName: roleName, - }); - // Client workflow 5.6.7.2.1 - if (terminating) { - delegationsToVisit.splice(0); // empty the array - break; - } - } - childRolesToVisit.reverse(); - delegationsToVisit.push(...childRolesToVisit); - } - } - return; // no matching target found - } - generateTargetPath(targetInfo) { - if (!this.targetDir) { - throw new error_1.ValueError('Target directory not set'); - } - // URL encode target path - const filePath = encodeURIComponent(targetInfo.path); - return path.join(this.targetDir, filePath); - } - persistMetadata(metaDataName, bytesData) { - const encodedName = encodeURIComponent(metaDataName); - try { - const filePath = path.join(this.dir, `${encodedName}.json`); - log('WRITE %s', filePath); - fs.writeFileSync(filePath, bytesData.toString('utf8')); - } - catch (error) { - throw new error_1.PersistError(`Failed to persist metadata ${encodedName} error: ${error}`); - } - } -} -exports.Updater = Updater; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/utils/tmpfile.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/utils/tmpfile.js deleted file mode 100644 index 923eef6044bcc..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/utils/tmpfile.js +++ /dev/null @@ -1,25 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.withTempFile = void 0; -const promises_1 = __importDefault(require("fs/promises")); -const os_1 = __importDefault(require("os")); -const path_1 = __importDefault(require("path")); -// Invokes the given handler with the path to a temporary file. The file -// is deleted after the handler returns. -const withTempFile = async (handler) => withTempDir(async (dir) => handler(path_1.default.join(dir, 'tempfile'))); -exports.withTempFile = withTempFile; -// Invokes the given handler with a temporary directory. The directory is -// deleted after the handler returns. 
-const withTempDir = async (handler) => { - const tmpDir = await promises_1.default.realpath(os_1.default.tmpdir()); - const dir = await promises_1.default.mkdtemp(tmpDir + path_1.default.sep); - try { - return await handler(dir); - } - finally { - await promises_1.default.rm(dir, { force: true, recursive: true, maxRetries: 3 }); - } -}; diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/utils/url.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/utils/url.js deleted file mode 100644 index 359d1f3ef385b..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/dist/utils/url.js +++ /dev/null @@ -1,13 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.join = join; -const url_1 = require("url"); -function join(base, path) { - return new url_1.URL(ensureTrailingSlash(base) + removeLeadingSlash(path)).toString(); -} -function ensureTrailingSlash(path) { - return path.endsWith('/') ? path : path + '/'; -} -function removeLeadingSlash(path) { - return path.startsWith('/') ? path.slice(1) : path; -} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/package.json deleted file mode 100644 index e79a3d45f3f06..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js/package.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "name": "tuf-js", - "version": "3.0.1", - "description": "JavaScript implementation of The Update Framework (TUF)", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "scripts": { - "build": "tsc --build", - "clean": "rm -rf dist && rm tsconfig.tsbuildinfo", - "test": "jest" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/theupdateframework/tuf-js.git" - }, - "files": [ - "dist" - ], - "keywords": [ - "tuf", - "security", - "update" - ], - "author": "bdehamer@github.com", - "license": "MIT", - "bugs": { - "url": "https://github.com/theupdateframework/tuf-js/issues" - }, - "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/client#readme", - "devDependencies": { - "@tufjs/repo-mock": "3.0.1", - "@types/debug": "^4.1.12", - "@types/make-fetch-happen": "^10.0.4" - }, - "dependencies": { - "@tufjs/models": "3.0.1", - "debug": "^4.3.6", - "make-fetch-happen": "^14.0.1" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } -} diff --git a/node_modules/@sigstore/tuf/dist/appdata.js b/node_modules/@sigstore/tuf/dist/appdata.js index c9a8ee92b531e..06a8143e70da2 100644 --- a/node_modules/@sigstore/tuf/dist/appdata.js +++ b/node_modules/@sigstore/tuf/dist/appdata.js @@ -3,7 +3,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.appDataPath = void 0; +exports.appDataPath = appDataPath; /* Copyright 2023 The Sigstore Authors. 
@@ -41,4 +41,3 @@ function appDataPath(name) { } } } -exports.appDataPath = appDataPath; diff --git a/node_modules/@sigstore/tuf/dist/client.js b/node_modules/@sigstore/tuf/dist/client.js index 2019c1fd30f88..328f49e40dbbd 100644 --- a/node_modules/@sigstore/tuf/dist/client.js +++ b/node_modules/@sigstore/tuf/dist/client.js @@ -79,7 +79,6 @@ function seedCache({ cachePath, mirrorURL, tufRootPath, forceInit, }) { fs_1.default.copyFileSync(tufRootPath, cachedRootPath); } else { - /* eslint-disable @typescript-eslint/no-var-requires */ const seeds = require('../seeds.json'); const repoSeed = seeds[mirrorURL]; if (!repoSeed) { diff --git a/node_modules/@sigstore/tuf/dist/index.js b/node_modules/@sigstore/tuf/dist/index.js index 678c81d45d21e..2af5de93ec5d2 100644 --- a/node_modules/@sigstore/tuf/dist/index.js +++ b/node_modules/@sigstore/tuf/dist/index.js @@ -1,6 +1,8 @@ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); -exports.TUFError = exports.initTUF = exports.getTrustedRoot = exports.DEFAULT_MIRROR_URL = void 0; +exports.TUFError = exports.DEFAULT_MIRROR_URL = void 0; +exports.getTrustedRoot = getTrustedRoot; +exports.initTUF = initTUF; /* Copyright 2023 The Sigstore Authors. @@ -31,14 +33,12 @@ options = {}) { const trustedRoot = await client.getTarget(TRUSTED_ROOT_TARGET); return protobuf_specs_1.TrustedRoot.fromJSON(JSON.parse(trustedRoot)); } -exports.getTrustedRoot = getTrustedRoot; async function initTUF( /* istanbul ignore next */ options = {}) { const client = createClient(options); return client.refresh().then(() => client); } -exports.initTUF = initTUF; // Create a TUF client with default options function createClient(options) { /* istanbul ignore next */ diff --git a/node_modules/@sigstore/tuf/dist/target.js b/node_modules/@sigstore/tuf/dist/target.js index 29eaf99a7e721..5c6675bdfbf5f 100644 --- a/node_modules/@sigstore/tuf/dist/target.js +++ b/node_modules/@sigstore/tuf/dist/target.js @@ -3,7 +3,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.readTarget = void 0; +exports.readTarget = readTarget; /* Copyright 2023 The Sigstore Authors. @@ -39,7 +39,6 @@ async function readTarget(tuf, targetPath) { }); }); } -exports.readTarget = readTarget; // Returns the local path to the specified target. If the target is not yet // cached locally, the provided TUF Updater will be used to download and // cache the target. 
diff --git a/node_modules/@sigstore/tuf/package.json b/node_modules/@sigstore/tuf/package.json index b7fd34ac9674e..808689dfddf92 100644 --- a/node_modules/@sigstore/tuf/package.json +++ b/node_modules/@sigstore/tuf/package.json @@ -1,6 +1,6 @@ { "name": "@sigstore/tuf", - "version": "2.3.4", + "version": "3.0.0", "description": "Client for the Sigstore TUF repository", "main": "dist/index.js", "types": "dist/index.d.ts", @@ -28,14 +28,14 @@ }, "devDependencies": { "@sigstore/jest": "^0.0.0", - "@tufjs/repo-mock": "^2.0.1", + "@tufjs/repo-mock": "^3.0.1", "@types/make-fetch-happen": "^10.0.4" }, "dependencies": { "@sigstore/protobuf-specs": "^0.3.2", - "tuf-js": "^2.2.1" + "tuf-js": "^3.0.1" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" } } diff --git a/node_modules/@sigstore/tuf/seeds.json b/node_modules/@sigstore/tuf/seeds.json index e8d97d5fa7a67..d1d3c6b5c4604 100644 --- a/node_modules/@sigstore/tuf/seeds.json +++ b/node_modules/@sigstore/tuf/seeds.json @@ -1 +1 @@ -{"https://tuf-repo-cdn.sigstore.dev":{"root.json":"{
	"signed": {
		"_type": "root",
		"spec_version": "1.0",
		"version": 9,
		"expires": "2024-09-12T06:53:10Z",
		"keys": {
			"1e1d65ce98b10addad4764febf7dda2d0436b3d3a3893579c0dddaea20e54849": {
				"keytype": "ecdsa",
				"scheme": "ecdsa-sha2-nistp256",
				"keyid_hash_algorithms": [
					"sha256",
					"sha512"
				],
				"keyval": {
					"public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEzBzVOmHCPojMVLSI364WiiV8NPrD\n6IgRxVliskz/v+y3JER5mcVGcONliDcWMC5J2lfHmjPNPhb4H7xm8LzfSA==\n-----END PUBLIC KEY-----\n"
				}
			},
			"230e212616274a4195cdc28e9fce782c20e6c720f1a811b40f98228376bdd3ac": {
				"keytype": "ecdsa",
				"scheme": "ecdsa-sha2-nistp256",
				"keyid_hash_algorithms": [
					"sha256",
					"sha512"
				],
				"keyval": {
					"public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAELrWvNt94v4R085ELeeCMxHp7PldF\n0/T1GxukUh2ODuggLGJE0pc1e8CSBf6CS91Fwo9FUOuRsjBUld+VqSyCdQ==\n-----END PUBLIC KEY-----\n"
				}
			},
			"3c344aa068fd4cc4e87dc50b612c02431fbc771e95003993683a2b0bf260cf0e": {
				"keytype": "ecdsa",
				"scheme": "ecdsa-sha2-nistp256",
				"keyid_hash_algorithms": [
					"sha256",
					"sha512"
				],
				"keyval": {
					"public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEy8XKsmhBYDI8Jc0GwzBxeKax0cm5\nSTKEU65HPFunUn41sT8pi0FjM4IkHz/YUmwmLUO0Wt7lxhj6BkLIK4qYAw==\n-----END PUBLIC KEY-----\n"
				}
			},
			"923bb39e60dd6fa2c31e6ea55473aa93b64dd4e53e16fbe42f6a207d3f97de2d": {
				"keytype": "ecdsa",
				"scheme": "ecdsa-sha2-nistp256",
				"keyid_hash_algorithms": [
					"sha256",
					"sha512"
				],
				"keyval": {
					"public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEWRiGr5+j+3J5SsH+Ztr5nE2H2wO7\nBV+nO3s93gLca18qTOzHY1oWyAGDykMSsGTUBSt9D+An0KfKsD2mfSM42Q==\n-----END PUBLIC KEY-----\n"
				}
			},
			"e2f59acb9488519407e18cbfc9329510be03c04aca9929d2f0301343fec85523": {
				"keytype": "ecdsa",
				"scheme": "ecdsa-sha2-nistp256",
				"keyid_hash_algorithms": [
					"sha256",
					"sha512"
				],
				"keyval": {
					"public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEinikSsAQmYkNeH5eYq/CnIzLaacO\nxlSaawQDOwqKy/tCqxq5xxPSJc21K4WIhs9GyOkKfzueY3GILzcMJZ4cWw==\n-----END PUBLIC KEY-----\n"
				}
			},
			"ec81669734e017996c5b85f3d02c3de1dd4637a152019fe1af125d2f9368b95e": {
				"keytype": "ecdsa",
				"scheme": "ecdsa-sha2-nistp256",
				"keyid_hash_algorithms": [
					"sha256",
					"sha512"
				],
				"keyval": {
					"public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEEXsz3SZXFb8jMV42j6pJlyjbjR8K\nN3Bwocexq6LMIb5qsWKOQvLN16NUefLc4HswOoumRsVVaajSpQS6fobkRw==\n-----END PUBLIC KEY-----\n"
				}
			},
			"fdfa83a07b5a83589b87ded41f77f39d232ad91f7cce52868dacd06ba089849f": {
				"keytype": "ecdsa",
				"scheme": "ecdsa-sha2-nistp256",
				"keyid_hash_algorithms": [
					"sha256",
					"sha512"
				],
				"keyval": {
					"public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE0ghrh92Lw1Yr3idGV5WqCtMDB8Cx\n+D8hdC4w2ZLNIplVRoVGLskYa3gheMyOjiJ8kPi15aQ2//7P+oj7UvJPGw==\n-----END PUBLIC KEY-----\n"
				}
			}
		},
		"roles": {
			"root": {
				"keyids": [
					"3c344aa068fd4cc4e87dc50b612c02431fbc771e95003993683a2b0bf260cf0e",
					"ec81669734e017996c5b85f3d02c3de1dd4637a152019fe1af125d2f9368b95e",
					"1e1d65ce98b10addad4764febf7dda2d0436b3d3a3893579c0dddaea20e54849",
					"e2f59acb9488519407e18cbfc9329510be03c04aca9929d2f0301343fec85523",
					"fdfa83a07b5a83589b87ded41f77f39d232ad91f7cce52868dacd06ba089849f"
				],
				"threshold": 3
			},
			"snapshot": {
				"keyids": [
					"230e212616274a4195cdc28e9fce782c20e6c720f1a811b40f98228376bdd3ac"
				],
				"threshold": 1
			},
			"targets": {
				"keyids": [
					"3c344aa068fd4cc4e87dc50b612c02431fbc771e95003993683a2b0bf260cf0e",
					"ec81669734e017996c5b85f3d02c3de1dd4637a152019fe1af125d2f9368b95e",
					"1e1d65ce98b10addad4764febf7dda2d0436b3d3a3893579c0dddaea20e54849",
					"e2f59acb9488519407e18cbfc9329510be03c04aca9929d2f0301343fec85523",
					"fdfa83a07b5a83589b87ded41f77f39d232ad91f7cce52868dacd06ba089849f"
				],
				"threshold": 3
			},
			"timestamp": {
				"keyids": [
					"923bb39e60dd6fa2c31e6ea55473aa93b64dd4e53e16fbe42f6a207d3f97de2d"
				],
				"threshold": 1
			}
		},
		"consistent_snapshot": true
	},
	"signatures": [
		{
			"keyid": "ff51e17fcf253119b7033f6f57512631da4a0969442afcf9fc8b141c7f2be99c",
			"sig": "30450221008b78f894c3cfed3bd486379c4e0e0dfb3e7dd8cbc4d5598d2818eea1ba3c7550022029d3d06e89d04d37849985dc46c0e10dc5b1fc68dc70af1ec9910303a1f3ee2f"
		},
		{
			"keyid": "25a0eb450fd3ee2bd79218c963dce3f1cc6118badf251bf149f0bd07d5cabe99",
			"sig": "30450221009e6b90b935e09b837a90d4402eaa27d5ea26eb7891948ba0ed7090841248f436022003dc2251c4d4a7999b91e9ad0868765ae09ac7269279f2a7899bafef7a2d9260"
		},
		{
			"keyid": "f5312f542c21273d9485a49394386c4575804770667f2ddb59b3bf0669fddd2f",
			"sig": "30440220099e907dcf90b7b6e109fd1d6e442006fccbb48894aaaff47ab824b03fb35d0d02202aa0a06c21a4233f37900a48bc8777d3b47f59e3a38616ce631a04df57f96736"
		},
		{
			"keyid": "3c344aa068fd4cc4e87dc50b612c02431fbc771e95003993683a2b0bf260cf0e",
			"sig": "30450221008b78f894c3cfed3bd486379c4e0e0dfb3e7dd8cbc4d5598d2818eea1ba3c7550022029d3d06e89d04d37849985dc46c0e10dc5b1fc68dc70af1ec9910303a1f3ee2f"
		},
		{
			"keyid": "ec81669734e017996c5b85f3d02c3de1dd4637a152019fe1af125d2f9368b95e",
			"sig": "30450221009e6b90b935e09b837a90d4402eaa27d5ea26eb7891948ba0ed7090841248f436022003dc2251c4d4a7999b91e9ad0868765ae09ac7269279f2a7899bafef7a2d9260"
		},
		{
			"keyid": "e2f59acb9488519407e18cbfc9329510be03c04aca9929d2f0301343fec85523",
			"sig": "304502200e5613b901e0f3e08eceabddc73f98b50ddf892e998d0b369c6e3d451ac48875022100940cf92d1f43ee2e5cdbb22572bb52925ed3863a688f7ffdd4bd2e2e56f028b3"
		},
		{
			"keyid": "2e61cd0cbf4a8f45809bda9f7f78c0d33ad11842ff94ae340873e2664dc843de",
			"sig": "304502202cff44f2215d7a47b28b8f5f580c2cfbbd1bfcfcbbe78de323045b2c0badc5e9022100c743949eb3f4ea5a4b9ae27ac6eddea1f0ff9bfd004f8a9a9d18c6e4142b6e75"
		},
		{
			"keyid": "1e1d65ce98b10addad4764febf7dda2d0436b3d3a3893579c0dddaea20e54849",
			"sig": "30440220099e907dcf90b7b6e109fd1d6e442006fccbb48894aaaff47ab824b03fb35d0d02202aa0a06c21a4233f37900a48bc8777d3b47f59e3a38616ce631a04df57f96736"
		},
		{
			"keyid": "fdfa83a07b5a83589b87ded41f77f39d232ad91f7cce52868dacd06ba089849f",
			"sig": "304502202cff44f2215d7a47b28b8f5f580c2cfbbd1bfcfcbbe78de323045b2c0badc5e9022100c743949eb3f4ea5a4b9ae27ac6eddea1f0ff9bfd004f8a9a9d18c6e4142b6e75"
		},
		{
			"keyid": "7f7513b25429a64473e10ce3ad2f3da372bbdd14b65d07bbaf547e7c8bbbe62b",
			"sig": "304502200e5613b901e0f3e08eceabddc73f98b50ddf892e998d0b369c6e3d451ac48875022100940cf92d1f43ee2e5cdbb22572bb52925ed3863a688f7ffdd4bd2e2e56f028b3"
		}
	]
}","targets":{"trusted_root.json":"{
  "mediaType": "application/vnd.dev.sigstore.trustedroot+json;version=0.1",
  "tlogs": [
    {
      "baseUrl": "https://rekor.sigstore.dev",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE2G2Y+2tabdTV5BcGiBIx0a9fAFwrkBbmLSGtks4L3qX6yYY0zufBnhC8Ur/iy55GhWP/9A/bY2LhC30M9+RYtw==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2021-01-12T11:53:27.000Z"
        }
      },
      "logId": {
        "keyId": "wNI9atQGlz+VWfO6LRygH4QUfY/8W4RFwiT5i5WRgB0="
      }
    }
  ],
  "certificateAuthorities": [
    {
      "subject": {
        "organization": "sigstore.dev",
        "commonName": "sigstore"
      },
      "uri": "https://fulcio.sigstore.dev",
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIIB+DCCAX6gAwIBAgITNVkDZoCiofPDsy7dfm6geLbuhzAKBggqhkjOPQQDAzAqMRUwEwYDVQQKEwxzaWdzdG9yZS5kZXYxETAPBgNVBAMTCHNpZ3N0b3JlMB4XDTIxMDMwNzAzMjAyOVoXDTMxMDIyMzAzMjAyOVowKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTB2MBAGByqGSM49AgEGBSuBBAAiA2IABLSyA7Ii5k+pNO8ZEWY0ylemWDowOkNa3kL+GZE5Z5GWehL9/A9bRNA3RbrsZ5i0JcastaRL7Sp5fp/jD5dxqc/UdTVnlvS16an+2Yfswe/QuLolRUCrcOE2+2iA5+tzd6NmMGQwDgYDVR0PAQH/BAQDAgEGMBIGA1UdEwEB/wQIMAYBAf8CAQEwHQYDVR0OBBYEFMjFHQBBmiQpMlEk6w2uSu1KBtPsMB8GA1UdIwQYMBaAFMjFHQBBmiQpMlEk6w2uSu1KBtPsMAoGCCqGSM49BAMDA2gAMGUCMH8liWJfMui6vXXBhjDgY4MwslmN/TJxVe/83WrFomwmNf056y1X48F9c4m3a3ozXAIxAKjRay5/aj/jsKKGIkmQatjI8uupHr/+CxFvaJWmpYqNkLDGRU+9orzh5hI2RrcuaQ=="
          }
        ]
      },
      "validFor": {
        "start": "2021-03-07T03:20:29.000Z",
        "end": "2022-12-31T23:59:59.999Z"
      }
    },
    {
      "subject": {
        "organization": "sigstore.dev",
        "commonName": "sigstore"
      },
      "uri": "https://fulcio.sigstore.dev",
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIICGjCCAaGgAwIBAgIUALnViVfnU0brJasmRkHrn/UnfaQwCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMjA0MTMyMDA2MTVaFw0zMTEwMDUxMzU2NThaMDcxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEeMBwGA1UEAxMVc2lnc3RvcmUtaW50ZXJtZWRpYXRlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE8RVS/ysH+NOvuDZyPIZtilgUF9NlarYpAd9HP1vBBH1U5CV77LSS7s0ZiH4nE7Hv7ptS6LvvR/STk798LVgMzLlJ4HeIfF3tHSaexLcYpSASr1kS0N/RgBJz/9jWCiXno3sweTAOBgNVHQ8BAf8EBAMCAQYwEwYDVR0lBAwwCgYIKwYBBQUHAwMwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQU39Ppz1YkEZb5qNjpKFWixi4YZD8wHwYDVR0jBBgwFoAUWMAeX5FFpWapesyQoZMi0CrFxfowCgYIKoZIzj0EAwMDZwAwZAIwPCsQK4DYiZYDPIaDi5HFKnfxXx6ASSVmERfsynYBiX2X6SJRnZU84/9DZdnFvvxmAjBOt6QpBlc4J/0DxvkTCqpclvziL6BCCPnjdlIB3Pu3BxsPmygUY7Ii2zbdCdliiow="
          },
          {
            "rawBytes": "MIIB9zCCAXygAwIBAgIUALZNAPFdxHPwjeDloDwyYChAO/4wCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMTEwMDcxMzU2NTlaFw0zMTEwMDUxMzU2NThaMCoxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjERMA8GA1UEAxMIc2lnc3RvcmUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAT7XeFT4rb3PQGwS4IajtLk3/OlnpgangaBclYpsYBr5i+4ynB07ceb3LP0OIOZdxexX69c5iVuyJRQ+Hz05yi+UF3uBWAlHpiS5sh0+H2GHE7SXrk1EC5m1Tr19L9gg92jYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRYwB5fkUWlZql6zJChkyLQKsXF+jAfBgNVHSMEGDAWgBRYwB5fkUWlZql6zJChkyLQKsXF+jAKBggqhkjOPQQDAwNpADBmAjEAj1nHeXZp+13NWBNa+EDsDP8G1WWg1tCMWP/WHPqpaVo0jhsweNFZgSs0eE7wYI4qAjEA2WB9ot98sIkoF3vZYdd3/VtWB5b9TNMea7Ix/stJ5TfcLLeABLE4BNJOsQ4vnBHJ"
          }
        ]
      },
      "validFor": {
        "start": "2022-04-13T20:06:15.000Z"
      }
    }
  ],
  "ctlogs": [
    {
      "baseUrl": "https://ctfe.sigstore.dev/test",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEbfwR+RJudXscgRBRpKX1XFDy3PyudDxz/SfnRi1fT8ekpfBd2O1uoz7jr3Z8nKzxA69EUQ+eFCFI3zeubPWU7w==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2021-03-14T00:00:00.000Z",
          "end": "2022-10-31T23:59:59.999Z"
        }
      },
      "logId": {
        "keyId": "CGCS8ChS/2hF0dFrJ4ScRWcYrBY9wzjSbea8IgY2b3I="
      }
    },
    {
      "baseUrl": "https://ctfe.sigstore.dev/2022",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEiPSlFi0CmFTfEjCUqF9HuCEcYXNKAaYalIJmBZ8yyezPjTqhxrKBpMnaocVtLJBI1eM3uXnQzQGAJdJ4gs9Fyw==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2022-10-20T00:00:00.000Z"
        }
      },
      "logId": {
        "keyId": "3T0wasbHETJjGR4cmWc3AqJKXrjePK3/h4pygC8p7o4="
      }
    }
  ],
  "timestampAuthorities": [
    {
      "subject": {
        "organization": "GitHub, Inc.",
        "commonName": "Internal Services Root"
      },
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIIB3DCCAWKgAwIBAgIUchkNsH36Xa04b1LqIc+qr9DVecMwCgYIKoZIzj0EAwMwMjEVMBMGA1UEChMMR2l0SHViLCBJbmMuMRkwFwYDVQQDExBUU0EgaW50ZXJtZWRpYXRlMB4XDTIzMDQxNDAwMDAwMFoXDTI0MDQxMzAwMDAwMFowMjEVMBMGA1UEChMMR2l0SHViLCBJbmMuMRkwFwYDVQQDExBUU0EgVGltZXN0YW1waW5nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEUD5ZNbSqYMd6r8qpOOEX9ibGnZT9GsuXOhr/f8U9FJugBGExKYp40OULS0erjZW7xV9xV52NnJf5OeDq4e5ZKqNWMFQwDgYDVR0PAQH/BAQDAgeAMBMGA1UdJQQMMAoGCCsGAQUFBwMIMAwGA1UdEwEB/wQCMAAwHwYDVR0jBBgwFoAUaW1RudOgVt0leqY0WKYbuPr47wAwCgYIKoZIzj0EAwMDaAAwZQIwbUH9HvD4ejCZJOWQnqAlkqURllvu9M8+VqLbiRK+zSfZCZwsiljRn8MQQRSkXEE5AjEAg+VxqtojfVfu8DhzzhCx9GKETbJHb19iV72mMKUbDAFmzZ6bQ8b54Zb8tidy5aWe"
          },
          {
            "rawBytes": "MIICEDCCAZWgAwIBAgIUX8ZO5QXP7vN4dMQ5e9sU3nub8OgwCgYIKoZIzj0EAwMwODEVMBMGA1UEChMMR2l0SHViLCBJbmMuMR8wHQYDVQQDExZJbnRlcm5hbCBTZXJ2aWNlcyBSb290MB4XDTIzMDQxNDAwMDAwMFoXDTI4MDQxMjAwMDAwMFowMjEVMBMGA1UEChMMR2l0SHViLCBJbmMuMRkwFwYDVQQDExBUU0EgaW50ZXJtZWRpYXRlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEvMLY/dTVbvIJYANAuszEwJnQE1llftynyMKIMhh48HmqbVr5ygybzsLRLVKbBWOdZ21aeJz+gZiytZetqcyF9WlER5NEMf6JV7ZNojQpxHq4RHGoGSceQv/qvTiZxEDKo2YwZDAOBgNVHQ8BAf8EBAMCAQYwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQUaW1RudOgVt0leqY0WKYbuPr47wAwHwYDVR0jBBgwFoAU9NYYlobnAG4c0/qjxyH/lq/wz+QwCgYIKoZIzj0EAwMDaQAwZgIxAK1B185ygCrIYFlIs3GjswjnwSMG6LY8woLVdakKDZxVa8f8cqMs1DhcxJ0+09w95QIxAO+tBzZk7vjUJ9iJgD4R6ZWTxQWKqNm74jO99o+o9sv4FI/SZTZTFyMn0IJEHdNmyA=="
          },
          {
            "rawBytes": "MIIB9DCCAXqgAwIBAgIUa/JAkdUjK4JUwsqtaiRJGWhqLSowCgYIKoZIzj0EAwMwODEVMBMGA1UEChMMR2l0SHViLCBJbmMuMR8wHQYDVQQDExZJbnRlcm5hbCBTZXJ2aWNlcyBSb290MB4XDTIzMDQxNDAwMDAwMFoXDTMzMDQxMTAwMDAwMFowODEVMBMGA1UEChMMR2l0SHViLCBJbmMuMR8wHQYDVQQDExZJbnRlcm5hbCBTZXJ2aWNlcyBSb290MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEf9jFAXxz4kx68AHRMOkFBhflDcMTvzaXz4x/FCcXjJ/1qEKon/qPIGnaURskDtyNbNDOpeJTDDFqt48iMPrnzpx6IZwqemfUJN4xBEZfza+pYt/iyod+9tZr20RRWSv/o0UwQzAOBgNVHQ8BAf8EBAMCAQYwEgYDVR0TAQH/BAgwBgEB/wIBAjAdBgNVHQ4EFgQU9NYYlobnAG4c0/qjxyH/lq/wz+QwCgYIKoZIzj0EAwMDaAAwZQIxALZLZ8BgRXzKxLMMN9VIlO+e4hrBnNBgF7tz7Hnrowv2NetZErIACKFymBlvWDvtMAIwZO+ki6ssQ1bsZo98O8mEAf2NZ7iiCgDDU0Vwjeco6zyeh0zBTs9/7gV6AHNQ53xD"
          }
        ]
      },
      "validFor": {
        "start": "2023-04-14T00:00:00.000Z"
      }
    }
  ]
}
","registry.npmjs.org%2Fkeys.json":"ewogICAgImtleXMiOiBbCiAgICAgICAgewogICAgICAgICAgICAia2V5SWQiOiAiU0hBMjU2OmpsM2J3c3d1ODBQampva0NnaDBvMnc1YzJVNExoUUFFNTdnajljejFrekEiLAogICAgICAgICAgICAia2V5VXNhZ2UiOiAibnBtOnNpZ25hdHVyZXMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRTFPbGIzek1BRkZ4WEtIaUlrUU81Y0ozWWhsNWk2VVBwK0lodXRlQkpidUhjQTVVb2dLbzBFV3RsV3dXNktTYUtvVE5FWUw3SmxDUWlWbmtoQmt0VWdnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIxOTk5LTAxLTAxVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgIH0KICAgICAgICB9LAogICAgICAgIHsKICAgICAgICAgICAgImtleUlkIjogIlNIQTI1NjpqbDNid3N3dTgwUGpqb2tDZ2gwbzJ3NWMyVTRMaFFBRTU3Z2o5Y3oxa3pBIiwKICAgICAgICAgICAgImtleVVzYWdlIjogIm5wbTphdHRlc3RhdGlvbnMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRTFPbGIzek1BRkZ4WEtIaUlrUU81Y0ozWWhsNWk2VVBwK0lodXRlQkpidUhjQTVVb2dLbzBFV3RsV3dXNktTYUtvVE5FWUw3SmxDUWlWbmtoQmt0VWdnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIyMDIyLTEyLTAxVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgIH0KICAgICAgICB9CiAgICBdCn0K"}}} +{"https://tuf-repo-cdn.sigstore.dev":{"root.json":"{
 "signatures": [
  {
   "keyid": "6f260089d5923daf20166ca657c543af618346ab971884a99962b01988bbe0c3",
   "sig": "30460221008ab1f6f17d4f9e6d7dcf1c88912b6b53cc10388644ae1f09bc37a082cd06003e022100e145ef4c7b782d4e8107b53437e669d0476892ce999903ae33d14448366996e7"
  },
  {
   "keyid": "e71a54d543835ba86adad9460379c7641fb8726d164ea766801a1c522aba7ea2",
   "sig": "3045022100c768b2f86da99569019c160a081da54ae36c34c0a3120d3cb69b53b7d113758e02204f671518f617b20d46537fae6c3b63bae8913f4f1962156105cc4f019ac35c6a"
  },
  {
   "keyid": "22f4caec6d8e6f9555af66b3d4c3cb06a3bb23fdc7e39c916c61f462e6f52b06",
   "sig": "3045022100b4434e6995d368d23e74759acd0cb9013c83a5d3511f0f997ec54c456ae4350a022015b0e265d182d2b61dc74e155d98b3c3fbe564ba05286aa14c8df02c9b756516"
  },
  {
   "keyid": "61643838125b440b40db6942f5cb5a31c0dc04368316eb2aaa58b95904a58222",
   "sig": "304502210082c58411d989eb9f861410857d42381590ec9424dbdaa51e78ed13515431904e0220118185da6a6c2947131c17797e2bb7620ce26e5f301d1ceac5f2a7e58f9dcf2e"
  },
  {
   "keyid": "a687e5bf4fab82b0ee58d46e05c9535145a2c9afb458f43d42b45ca0fdce2a70",
   "sig": "3046022100c78513854cae9c32eaa6b88e18912f48006c2757a258f917312caba75948eb9e022100d9e1b4ce0adfe9fd2e2148d7fa27a2f40ba1122bd69da7612d8d1776b013c91d"
  },
  {
   "keyid": "fdfa83a07b5a83589b87ded41f77f39d232ad91f7cce52868dacd06ba089849f",
   "sig": "3045022056483a2d5d9ea9cec6e11eadfb33c484b614298faca15acf1c431b11ed7f734c022100d0c1d726af92a87e4e66459ca5adf38a05b44e1f94318423f954bae8bca5bb2e"
  },
  {
   "keyid": "e2f59acb9488519407e18cbfc9329510be03c04aca9929d2f0301343fec85523",
   "sig": "3046022100d004de88024c32dc5653a9f4843cfc5215427048ad9600d2cf9c969e6edff3d2022100d9ebb798f5fc66af10899dece014a8628ccf3c5402cd4a4270207472f8f6e712"
  },
  {
   "keyid": "3c344aa068fd4cc4e87dc50b612c02431fbc771e95003993683a2b0bf260cf0e",
   "sig": "3046022100b7b09996c45ca2d4b05603e56baefa29718a0b71147cf8c6e66349baa61477df022100c4da80c717b4fa7bba0fd5c72da8a0499358b01358b2309f41d1456ea1e7e1d9"
  },
  {
   "keyid": "ec81669734e017996c5b85f3d02c3de1dd4637a152019fe1af125d2f9368b95e",
   "sig": "3046022100be9782c30744e411a82fa85b5138d601ce148bc19258aec64e7ec24478f38812022100caef63dcaf1a4b9a500d3bd0e3f164ec18f1b63d7a9460d9acab1066db0f016d"
  },
  {
   "keyid": "1e1d65ce98b10addad4764febf7dda2d0436b3d3a3893579c0dddaea20e54849",
   "sig": "30450220746ec3f8534ce55531d0d01ff64964ef440d1e7d2c4c142409b8e9769f1ada6f022100e3b929fcd93ea18feaa0825887a7210489879a66780c07a83f4bd46e2f09ab3b"
  }
 ],
 "signed": {
  "_type": "root",
  "consistent_snapshot": true,
  "expires": "2025-02-19T08:04:32Z",
  "keys": {
   "22f4caec6d8e6f9555af66b3d4c3cb06a3bb23fdc7e39c916c61f462e6f52b06": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEzBzVOmHCPojMVLSI364WiiV8NPrD\n6IgRxVliskz/v+y3JER5mcVGcONliDcWMC5J2lfHmjPNPhb4H7xm8LzfSA==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@santiagotorres"
   },
   "61643838125b440b40db6942f5cb5a31c0dc04368316eb2aaa58b95904a58222": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEinikSsAQmYkNeH5eYq/CnIzLaacO\nxlSaawQDOwqKy/tCqxq5xxPSJc21K4WIhs9GyOkKfzueY3GILzcMJZ4cWw==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@bobcallaway"
   },
   "6f260089d5923daf20166ca657c543af618346ab971884a99962b01988bbe0c3": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEy8XKsmhBYDI8Jc0GwzBxeKax0cm5\nSTKEU65HPFunUn41sT8pi0FjM4IkHz/YUmwmLUO0Wt7lxhj6BkLIK4qYAw==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@dlorenc"
   },
   "7247f0dbad85b147e1863bade761243cc785dcb7aa410e7105dd3d2b61a36d2c": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEWRiGr5+j+3J5SsH+Ztr5nE2H2wO7\nBV+nO3s93gLca18qTOzHY1oWyAGDykMSsGTUBSt9D+An0KfKsD2mfSM42Q==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-online-uri": "gcpkms://projects/sigstore-root-signing/locations/global/keyRings/root/cryptoKeys/timestamp"
   },
   "a687e5bf4fab82b0ee58d46e05c9535145a2c9afb458f43d42b45ca0fdce2a70": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE0ghrh92Lw1Yr3idGV5WqCtMDB8Cx\n+D8hdC4w2ZLNIplVRoVGLskYa3gheMyOjiJ8kPi15aQ2//7P+oj7UvJPGw==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@joshuagl"
   },
   "e71a54d543835ba86adad9460379c7641fb8726d164ea766801a1c522aba7ea2": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEEXsz3SZXFb8jMV42j6pJlyjbjR8K\nN3Bwocexq6LMIb5qsWKOQvLN16NUefLc4HswOoumRsVVaajSpQS6fobkRw==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@mnm678"
   }
  },
  "roles": {
   "root": {
    "keyids": [
     "6f260089d5923daf20166ca657c543af618346ab971884a99962b01988bbe0c3",
     "e71a54d543835ba86adad9460379c7641fb8726d164ea766801a1c522aba7ea2",
     "22f4caec6d8e6f9555af66b3d4c3cb06a3bb23fdc7e39c916c61f462e6f52b06",
     "61643838125b440b40db6942f5cb5a31c0dc04368316eb2aaa58b95904a58222",
     "a687e5bf4fab82b0ee58d46e05c9535145a2c9afb458f43d42b45ca0fdce2a70"
    ],
    "threshold": 3
   },
   "snapshot": {
    "keyids": [
     "7247f0dbad85b147e1863bade761243cc785dcb7aa410e7105dd3d2b61a36d2c"
    ],
    "threshold": 1,
    "x-tuf-on-ci-expiry-period": 3650,
    "x-tuf-on-ci-signing-period": 365
   },
   "targets": {
    "keyids": [
     "6f260089d5923daf20166ca657c543af618346ab971884a99962b01988bbe0c3",
     "e71a54d543835ba86adad9460379c7641fb8726d164ea766801a1c522aba7ea2",
     "22f4caec6d8e6f9555af66b3d4c3cb06a3bb23fdc7e39c916c61f462e6f52b06",
     "61643838125b440b40db6942f5cb5a31c0dc04368316eb2aaa58b95904a58222",
     "a687e5bf4fab82b0ee58d46e05c9535145a2c9afb458f43d42b45ca0fdce2a70"
    ],
    "threshold": 3
   },
   "timestamp": {
    "keyids": [
     "7247f0dbad85b147e1863bade761243cc785dcb7aa410e7105dd3d2b61a36d2c"
    ],
    "threshold": 1,
    "x-tuf-on-ci-expiry-period": 7,
    "x-tuf-on-ci-signing-period": 4
   }
  },
  "spec_version": "1.0",
  "version": 10,
  "x-tuf-on-ci-expiry-period": 182,
  "x-tuf-on-ci-signing-period": 31
 }
}","targets":{"trusted_root.json":"{
  "mediaType": "application/vnd.dev.sigstore.trustedroot+json;version=0.1",
  "tlogs": [
    {
      "baseUrl": "https://rekor.sigstore.dev",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE2G2Y+2tabdTV5BcGiBIx0a9fAFwrkBbmLSGtks4L3qX6yYY0zufBnhC8Ur/iy55GhWP/9A/bY2LhC30M9+RYtw==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2021-01-12T11:53:27.000Z"
        }
      },
      "logId": {
        "keyId": "wNI9atQGlz+VWfO6LRygH4QUfY/8W4RFwiT5i5WRgB0="
      }
    }
  ],
  "certificateAuthorities": [
    {
      "subject": {
        "organization": "sigstore.dev",
        "commonName": "sigstore"
      },
      "uri": "https://fulcio.sigstore.dev",
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIIB+DCCAX6gAwIBAgITNVkDZoCiofPDsy7dfm6geLbuhzAKBggqhkjOPQQDAzAqMRUwEwYDVQQKEwxzaWdzdG9yZS5kZXYxETAPBgNVBAMTCHNpZ3N0b3JlMB4XDTIxMDMwNzAzMjAyOVoXDTMxMDIyMzAzMjAyOVowKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTB2MBAGByqGSM49AgEGBSuBBAAiA2IABLSyA7Ii5k+pNO8ZEWY0ylemWDowOkNa3kL+GZE5Z5GWehL9/A9bRNA3RbrsZ5i0JcastaRL7Sp5fp/jD5dxqc/UdTVnlvS16an+2Yfswe/QuLolRUCrcOE2+2iA5+tzd6NmMGQwDgYDVR0PAQH/BAQDAgEGMBIGA1UdEwEB/wQIMAYBAf8CAQEwHQYDVR0OBBYEFMjFHQBBmiQpMlEk6w2uSu1KBtPsMB8GA1UdIwQYMBaAFMjFHQBBmiQpMlEk6w2uSu1KBtPsMAoGCCqGSM49BAMDA2gAMGUCMH8liWJfMui6vXXBhjDgY4MwslmN/TJxVe/83WrFomwmNf056y1X48F9c4m3a3ozXAIxAKjRay5/aj/jsKKGIkmQatjI8uupHr/+CxFvaJWmpYqNkLDGRU+9orzh5hI2RrcuaQ=="
          }
        ]
      },
      "validFor": {
        "start": "2021-03-07T03:20:29.000Z",
        "end": "2022-12-31T23:59:59.999Z"
      }
    },
    {
      "subject": {
        "organization": "sigstore.dev",
        "commonName": "sigstore"
      },
      "uri": "https://fulcio.sigstore.dev",
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIICGjCCAaGgAwIBAgIUALnViVfnU0brJasmRkHrn/UnfaQwCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMjA0MTMyMDA2MTVaFw0zMTEwMDUxMzU2NThaMDcxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEeMBwGA1UEAxMVc2lnc3RvcmUtaW50ZXJtZWRpYXRlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE8RVS/ysH+NOvuDZyPIZtilgUF9NlarYpAd9HP1vBBH1U5CV77LSS7s0ZiH4nE7Hv7ptS6LvvR/STk798LVgMzLlJ4HeIfF3tHSaexLcYpSASr1kS0N/RgBJz/9jWCiXno3sweTAOBgNVHQ8BAf8EBAMCAQYwEwYDVR0lBAwwCgYIKwYBBQUHAwMwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQU39Ppz1YkEZb5qNjpKFWixi4YZD8wHwYDVR0jBBgwFoAUWMAeX5FFpWapesyQoZMi0CrFxfowCgYIKoZIzj0EAwMDZwAwZAIwPCsQK4DYiZYDPIaDi5HFKnfxXx6ASSVmERfsynYBiX2X6SJRnZU84/9DZdnFvvxmAjBOt6QpBlc4J/0DxvkTCqpclvziL6BCCPnjdlIB3Pu3BxsPmygUY7Ii2zbdCdliiow="
          },
          {
            "rawBytes": "MIIB9zCCAXygAwIBAgIUALZNAPFdxHPwjeDloDwyYChAO/4wCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMTEwMDcxMzU2NTlaFw0zMTEwMDUxMzU2NThaMCoxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjERMA8GA1UEAxMIc2lnc3RvcmUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAT7XeFT4rb3PQGwS4IajtLk3/OlnpgangaBclYpsYBr5i+4ynB07ceb3LP0OIOZdxexX69c5iVuyJRQ+Hz05yi+UF3uBWAlHpiS5sh0+H2GHE7SXrk1EC5m1Tr19L9gg92jYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRYwB5fkUWlZql6zJChkyLQKsXF+jAfBgNVHSMEGDAWgBRYwB5fkUWlZql6zJChkyLQKsXF+jAKBggqhkjOPQQDAwNpADBmAjEAj1nHeXZp+13NWBNa+EDsDP8G1WWg1tCMWP/WHPqpaVo0jhsweNFZgSs0eE7wYI4qAjEA2WB9ot98sIkoF3vZYdd3/VtWB5b9TNMea7Ix/stJ5TfcLLeABLE4BNJOsQ4vnBHJ"
          }
        ]
      },
      "validFor": {
        "start": "2022-04-13T20:06:15.000Z"
      }
    }
  ],
  "ctlogs": [
    {
      "baseUrl": "https://ctfe.sigstore.dev/test",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEbfwR+RJudXscgRBRpKX1XFDy3PyudDxz/SfnRi1fT8ekpfBd2O1uoz7jr3Z8nKzxA69EUQ+eFCFI3zeubPWU7w==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2021-03-14T00:00:00.000Z",
          "end": "2022-10-31T23:59:59.999Z"
        }
      },
      "logId": {
        "keyId": "CGCS8ChS/2hF0dFrJ4ScRWcYrBY9wzjSbea8IgY2b3I="
      }
    },
    {
      "baseUrl": "https://ctfe.sigstore.dev/2022",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEiPSlFi0CmFTfEjCUqF9HuCEcYXNKAaYalIJmBZ8yyezPjTqhxrKBpMnaocVtLJBI1eM3uXnQzQGAJdJ4gs9Fyw==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2022-10-20T00:00:00.000Z"
        }
      },
      "logId": {
        "keyId": "3T0wasbHETJjGR4cmWc3AqJKXrjePK3/h4pygC8p7o4="
      }
    }
  ],
  "timestampAuthorities": [
    {
      "subject": {
        "organization": "GitHub, Inc.",
        "commonName": "Internal Services Root"
      },
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIIB3DCCAWKgAwIBAgIUchkNsH36Xa04b1LqIc+qr9DVecMwCgYIKoZIzj0EAwMwMjEVMBMGA1UEChMMR2l0SHViLCBJbmMuMRkwFwYDVQQDExBUU0EgaW50ZXJtZWRpYXRlMB4XDTIzMDQxNDAwMDAwMFoXDTI0MDQxMzAwMDAwMFowMjEVMBMGA1UEChMMR2l0SHViLCBJbmMuMRkwFwYDVQQDExBUU0EgVGltZXN0YW1waW5nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEUD5ZNbSqYMd6r8qpOOEX9ibGnZT9GsuXOhr/f8U9FJugBGExKYp40OULS0erjZW7xV9xV52NnJf5OeDq4e5ZKqNWMFQwDgYDVR0PAQH/BAQDAgeAMBMGA1UdJQQMMAoGCCsGAQUFBwMIMAwGA1UdEwEB/wQCMAAwHwYDVR0jBBgwFoAUaW1RudOgVt0leqY0WKYbuPr47wAwCgYIKoZIzj0EAwMDaAAwZQIwbUH9HvD4ejCZJOWQnqAlkqURllvu9M8+VqLbiRK+zSfZCZwsiljRn8MQQRSkXEE5AjEAg+VxqtojfVfu8DhzzhCx9GKETbJHb19iV72mMKUbDAFmzZ6bQ8b54Zb8tidy5aWe"
          },
          {
            "rawBytes": "MIICEDCCAZWgAwIBAgIUX8ZO5QXP7vN4dMQ5e9sU3nub8OgwCgYIKoZIzj0EAwMwODEVMBMGA1UEChMMR2l0SHViLCBJbmMuMR8wHQYDVQQDExZJbnRlcm5hbCBTZXJ2aWNlcyBSb290MB4XDTIzMDQxNDAwMDAwMFoXDTI4MDQxMjAwMDAwMFowMjEVMBMGA1UEChMMR2l0SHViLCBJbmMuMRkwFwYDVQQDExBUU0EgaW50ZXJtZWRpYXRlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEvMLY/dTVbvIJYANAuszEwJnQE1llftynyMKIMhh48HmqbVr5ygybzsLRLVKbBWOdZ21aeJz+gZiytZetqcyF9WlER5NEMf6JV7ZNojQpxHq4RHGoGSceQv/qvTiZxEDKo2YwZDAOBgNVHQ8BAf8EBAMCAQYwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQUaW1RudOgVt0leqY0WKYbuPr47wAwHwYDVR0jBBgwFoAU9NYYlobnAG4c0/qjxyH/lq/wz+QwCgYIKoZIzj0EAwMDaQAwZgIxAK1B185ygCrIYFlIs3GjswjnwSMG6LY8woLVdakKDZxVa8f8cqMs1DhcxJ0+09w95QIxAO+tBzZk7vjUJ9iJgD4R6ZWTxQWKqNm74jO99o+o9sv4FI/SZTZTFyMn0IJEHdNmyA=="
          },
          {
            "rawBytes": "MIIB9DCCAXqgAwIBAgIUa/JAkdUjK4JUwsqtaiRJGWhqLSowCgYIKoZIzj0EAwMwODEVMBMGA1UEChMMR2l0SHViLCBJbmMuMR8wHQYDVQQDExZJbnRlcm5hbCBTZXJ2aWNlcyBSb290MB4XDTIzMDQxNDAwMDAwMFoXDTMzMDQxMTAwMDAwMFowODEVMBMGA1UEChMMR2l0SHViLCBJbmMuMR8wHQYDVQQDExZJbnRlcm5hbCBTZXJ2aWNlcyBSb290MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEf9jFAXxz4kx68AHRMOkFBhflDcMTvzaXz4x/FCcXjJ/1qEKon/qPIGnaURskDtyNbNDOpeJTDDFqt48iMPrnzpx6IZwqemfUJN4xBEZfza+pYt/iyod+9tZr20RRWSv/o0UwQzAOBgNVHQ8BAf8EBAMCAQYwEgYDVR0TAQH/BAgwBgEB/wIBAjAdBgNVHQ4EFgQU9NYYlobnAG4c0/qjxyH/lq/wz+QwCgYIKoZIzj0EAwMDaAAwZQIxALZLZ8BgRXzKxLMMN9VIlO+e4hrBnNBgF7tz7Hnrowv2NetZErIACKFymBlvWDvtMAIwZO+ki6ssQ1bsZo98O8mEAf2NZ7iiCgDDU0Vwjeco6zyeh0zBTs9/7gV6AHNQ53xD"
          }
        ]
      },
      "validFor": {
        "start": "2023-04-14T00:00:00.000Z"
      }
    }
  ]
}
","registry.npmjs.org%2Fkeys.json":"ewogICAgImtleXMiOiBbCiAgICAgICAgewogICAgICAgICAgICAia2V5SWQiOiAiU0hBMjU2OmpsM2J3c3d1ODBQampva0NnaDBvMnc1YzJVNExoUUFFNTdnajljejFrekEiLAogICAgICAgICAgICAia2V5VXNhZ2UiOiAibnBtOnNpZ25hdHVyZXMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRTFPbGIzek1BRkZ4WEtIaUlrUU81Y0ozWWhsNWk2VVBwK0lodXRlQkpidUhjQTVVb2dLbzBFV3RsV3dXNktTYUtvVE5FWUw3SmxDUWlWbmtoQmt0VWdnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIxOTk5LTAxLTAxVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgIH0KICAgICAgICB9LAogICAgICAgIHsKICAgICAgICAgICAgImtleUlkIjogIlNIQTI1NjpqbDNid3N3dTgwUGpqb2tDZ2gwbzJ3NWMyVTRMaFFBRTU3Z2o5Y3oxa3pBIiwKICAgICAgICAgICAgImtleVVzYWdlIjogIm5wbTphdHRlc3RhdGlvbnMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRTFPbGIzek1BRkZ4WEtIaUlrUU81Y0ozWWhsNWk2VVBwK0lodXRlQkpidUhjQTVVb2dLbzBFV3RsV3dXNktTYUtvVE5FWUw3SmxDUWlWbmtoQmt0VWdnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIyMDIyLTEyLTAxVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgIH0KICAgICAgICB9CiAgICBdCn0K"}}} diff --git a/node_modules/@tufjs/models/LICENSE b/node_modules/@tufjs/models/LICENSE deleted file mode 100644 index 420700f5d3765..0000000000000 --- a/node_modules/@tufjs/models/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022 GitHub and the TUF Contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/@tufjs/models/dist/base.js b/node_modules/@tufjs/models/dist/base.js deleted file mode 100644 index 259f6799c13a0..0000000000000 --- a/node_modules/@tufjs/models/dist/base.js +++ /dev/null @@ -1,83 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Signed = exports.isMetadataKind = exports.MetadataKind = void 0; -const util_1 = __importDefault(require("util")); -const error_1 = require("./error"); -const utils_1 = require("./utils"); -const SPECIFICATION_VERSION = ['1', '0', '31']; -var MetadataKind; -(function (MetadataKind) { - MetadataKind["Root"] = "root"; - MetadataKind["Timestamp"] = "timestamp"; - MetadataKind["Snapshot"] = "snapshot"; - MetadataKind["Targets"] = "targets"; -})(MetadataKind || (exports.MetadataKind = MetadataKind = {})); -function isMetadataKind(value) { - return (typeof value === 'string' && - Object.values(MetadataKind).includes(value)); -} -exports.isMetadataKind = isMetadataKind; -/*** - * A base class for the signed part of TUF metadata. - * - * Objects with base class Signed are usually included in a ``Metadata`` object - * on the signed attribute. This class provides attributes and methods that - * are common for all TUF metadata types (roles). - */ -class Signed { - constructor(options) { - this.specVersion = options.specVersion || SPECIFICATION_VERSION.join('.'); - const specList = this.specVersion.split('.'); - if (!(specList.length === 2 || specList.length === 3) || - !specList.every((item) => isNumeric(item))) { - throw new error_1.ValueError('Failed to parse specVersion'); - } - // major version must match - if (specList[0] != SPECIFICATION_VERSION[0]) { - throw new error_1.ValueError('Unsupported specVersion'); - } - this.expires = options.expires || new Date().toISOString(); - this.version = options.version || 1; - this.unrecognizedFields = options.unrecognizedFields || {}; - } - equals(other) { - if (!(other instanceof Signed)) { - return false; - } - return (this.specVersion === other.specVersion && - this.expires === other.expires && - this.version === other.version && - util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); - } - isExpired(referenceTime) { - if (!referenceTime) { - referenceTime = new Date(); - } - return referenceTime >= new Date(this.expires); - } - static commonFieldsFromJSON(data) { - const { spec_version, expires, version, ...rest } = data; - if (utils_1.guard.isDefined(spec_version) && !(typeof spec_version === 'string')) { - throw new TypeError('spec_version must be a string'); - } - if (utils_1.guard.isDefined(expires) && !(typeof expires === 'string')) { - throw new TypeError('expires must be a string'); - } - if (utils_1.guard.isDefined(version) && !(typeof version === 'number')) { - throw new TypeError('version must be a number'); - } - return { - specVersion: spec_version, - expires, - version, - unrecognizedFields: rest, - }; - } -} -exports.Signed = Signed; -function isNumeric(str) { - return !isNaN(Number(str)); -} diff --git a/node_modules/@tufjs/models/dist/delegations.js b/node_modules/@tufjs/models/dist/delegations.js deleted file mode 100644 index 7165f1e244393..0000000000000 --- a/node_modules/@tufjs/models/dist/delegations.js +++ /dev/null @@ -1,115 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Delegations = void 0; -const util_1 = __importDefault(require("util")); -const error_1 = require("./error"); -const key_1 = require("./key"); -const role_1 = require("./role"); -const utils_1 = require("./utils"); -/** - * A container object storing information about all delegations. - * - * Targets roles that are trusted to provide signed metadata files - * describing targets with designated pathnames and/or further delegations. - */ -class Delegations { - constructor(options) { - this.keys = options.keys; - this.unrecognizedFields = options.unrecognizedFields || {}; - if (options.roles) { - if (Object.keys(options.roles).some((roleName) => role_1.TOP_LEVEL_ROLE_NAMES.includes(roleName))) { - throw new error_1.ValueError('Delegated role name conflicts with top-level role name'); - } - } - this.succinctRoles = options.succinctRoles; - this.roles = options.roles; - } - equals(other) { - if (!(other instanceof Delegations)) { - return false; - } - return (util_1.default.isDeepStrictEqual(this.keys, other.keys) && - util_1.default.isDeepStrictEqual(this.roles, other.roles) && - util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields) && - util_1.default.isDeepStrictEqual(this.succinctRoles, other.succinctRoles)); - } - *rolesForTarget(targetPath) { - if (this.roles) { - for (const role of Object.values(this.roles)) { - if (role.isDelegatedPath(targetPath)) { - yield { role: role.name, terminating: role.terminating }; - } - } - } - else if (this.succinctRoles) { - yield { - role: this.succinctRoles.getRoleForTarget(targetPath), - terminating: true, - }; - } - } - toJSON() { - const json = { - keys: keysToJSON(this.keys), - ...this.unrecognizedFields, - }; - if (this.roles) { - json.roles = rolesToJSON(this.roles); - } - else if (this.succinctRoles) { - json.succinct_roles = this.succinctRoles.toJSON(); - } - return json; - } - static fromJSON(data) { - const { keys, roles, succinct_roles, ...unrecognizedFields } = data; - let succinctRoles; - if (utils_1.guard.isObject(succinct_roles)) { - succinctRoles = role_1.SuccinctRoles.fromJSON(succinct_roles); - } - return new Delegations({ - keys: keysFromJSON(keys), - roles: rolesFromJSON(roles), - unrecognizedFields, - succinctRoles, - }); - } -} -exports.Delegations = Delegations; -function keysToJSON(keys) { - return Object.entries(keys).reduce((acc, [keyId, key]) => ({ - ...acc, - [keyId]: key.toJSON(), - }), {}); -} -function rolesToJSON(roles) { - return Object.values(roles).map((role) => role.toJSON()); -} -function keysFromJSON(data) { - if (!utils_1.guard.isObjectRecord(data)) { - throw new TypeError('keys is malformed'); - } - return Object.entries(data).reduce((acc, [keyID, keyData]) => ({ - ...acc, - [keyID]: key_1.Key.fromJSON(keyID, keyData), - }), {}); -} -function rolesFromJSON(data) { - let roleMap; - if (utils_1.guard.isDefined(data)) { - if (!utils_1.guard.isObjectArray(data)) { - throw new TypeError('roles is malformed'); - } - roleMap = data.reduce((acc, role) => { - const delegatedRole = role_1.DelegatedRole.fromJSON(role); - return { - ...acc, - [delegatedRole.name]: delegatedRole, - }; - }, {}); - } - return roleMap; -} diff --git a/node_modules/@tufjs/models/dist/error.js b/node_modules/@tufjs/models/dist/error.js deleted file mode 100644 index ba80698747ba0..0000000000000 --- a/node_modules/@tufjs/models/dist/error.js +++ /dev/null @@ -1,27 +0,0 @@ -"use strict"; -Object.defineProperty(exports, 
"__esModule", { value: true }); -exports.UnsupportedAlgorithmError = exports.CryptoError = exports.LengthOrHashMismatchError = exports.UnsignedMetadataError = exports.RepositoryError = exports.ValueError = void 0; -// An error about insufficient values -class ValueError extends Error { -} -exports.ValueError = ValueError; -// An error with a repository's state, such as a missing file. -// It covers all exceptions that come from the repository side when -// looking from the perspective of users of metadata API or ngclient. -class RepositoryError extends Error { -} -exports.RepositoryError = RepositoryError; -// An error about metadata object with insufficient threshold of signatures. -class UnsignedMetadataError extends RepositoryError { -} -exports.UnsignedMetadataError = UnsignedMetadataError; -// An error while checking the length and hash values of an object. -class LengthOrHashMismatchError extends RepositoryError { -} -exports.LengthOrHashMismatchError = LengthOrHashMismatchError; -class CryptoError extends Error { -} -exports.CryptoError = CryptoError; -class UnsupportedAlgorithmError extends CryptoError { -} -exports.UnsupportedAlgorithmError = UnsupportedAlgorithmError; diff --git a/node_modules/@tufjs/models/dist/file.js b/node_modules/@tufjs/models/dist/file.js deleted file mode 100644 index b35fe5950bbb7..0000000000000 --- a/node_modules/@tufjs/models/dist/file.js +++ /dev/null @@ -1,183 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.TargetFile = exports.MetaFile = void 0; -const crypto_1 = __importDefault(require("crypto")); -const util_1 = __importDefault(require("util")); -const error_1 = require("./error"); -const utils_1 = require("./utils"); -// A container with information about a particular metadata file. -// -// This class is used for Timestamp and Snapshot metadata. -class MetaFile { - constructor(opts) { - if (opts.version <= 0) { - throw new error_1.ValueError('Metafile version must be at least 1'); - } - if (opts.length !== undefined) { - validateLength(opts.length); - } - this.version = opts.version; - this.length = opts.length; - this.hashes = opts.hashes; - this.unrecognizedFields = opts.unrecognizedFields || {}; - } - equals(other) { - if (!(other instanceof MetaFile)) { - return false; - } - return (this.version === other.version && - this.length === other.length && - util_1.default.isDeepStrictEqual(this.hashes, other.hashes) && - util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); - } - verify(data) { - // Verifies that the given data matches the expected length. - if (this.length !== undefined) { - if (data.length !== this.length) { - throw new error_1.LengthOrHashMismatchError(`Expected length ${this.length} but got ${data.length}`); - } - } - // Verifies that the given data matches the supplied hashes. 
- if (this.hashes) { - Object.entries(this.hashes).forEach(([key, value]) => { - let hash; - try { - hash = crypto_1.default.createHash(key); - } - catch (e) { - throw new error_1.LengthOrHashMismatchError(`Hash algorithm ${key} not supported`); - } - const observedHash = hash.update(data).digest('hex'); - if (observedHash !== value) { - throw new error_1.LengthOrHashMismatchError(`Expected hash ${value} but got ${observedHash}`); - } - }); - } - } - toJSON() { - const json = { - version: this.version, - ...this.unrecognizedFields, - }; - if (this.length !== undefined) { - json.length = this.length; - } - if (this.hashes) { - json.hashes = this.hashes; - } - return json; - } - static fromJSON(data) { - const { version, length, hashes, ...rest } = data; - if (typeof version !== 'number') { - throw new TypeError('version must be a number'); - } - if (utils_1.guard.isDefined(length) && typeof length !== 'number') { - throw new TypeError('length must be a number'); - } - if (utils_1.guard.isDefined(hashes) && !utils_1.guard.isStringRecord(hashes)) { - throw new TypeError('hashes must be string keys and values'); - } - return new MetaFile({ - version, - length, - hashes, - unrecognizedFields: rest, - }); - } -} -exports.MetaFile = MetaFile; -// Container for info about a particular target file. -// -// This class is used for Target metadata. -class TargetFile { - constructor(opts) { - validateLength(opts.length); - this.length = opts.length; - this.path = opts.path; - this.hashes = opts.hashes; - this.unrecognizedFields = opts.unrecognizedFields || {}; - } - get custom() { - const custom = this.unrecognizedFields['custom']; - if (!custom || Array.isArray(custom) || !(typeof custom === 'object')) { - return {}; - } - return custom; - } - equals(other) { - if (!(other instanceof TargetFile)) { - return false; - } - return (this.length === other.length && - this.path === other.path && - util_1.default.isDeepStrictEqual(this.hashes, other.hashes) && - util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); - } - async verify(stream) { - let observedLength = 0; - // Create a digest for each hash algorithm - const digests = Object.keys(this.hashes).reduce((acc, key) => { - try { - acc[key] = crypto_1.default.createHash(key); - } - catch (e) { - throw new error_1.LengthOrHashMismatchError(`Hash algorithm ${key} not supported`); - } - return acc; - }, {}); - // Read stream chunk by chunk - for await (const chunk of stream) { - // Keep running tally of stream length - observedLength += chunk.length; - // Append chunk to each digest - Object.values(digests).forEach((digest) => { - digest.update(chunk); - }); - } - // Verify length matches expected value - if (observedLength !== this.length) { - throw new error_1.LengthOrHashMismatchError(`Expected length ${this.length} but got ${observedLength}`); - } - // Verify each digest matches expected value - Object.entries(digests).forEach(([key, value]) => { - const expected = this.hashes[key]; - const actual = value.digest('hex'); - if (actual !== expected) { - throw new error_1.LengthOrHashMismatchError(`Expected hash ${expected} but got ${actual}`); - } - }); - } - toJSON() { - return { - length: this.length, - hashes: this.hashes, - ...this.unrecognizedFields, - }; - } - static fromJSON(path, data) { - const { length, hashes, ...rest } = data; - if (typeof length !== 'number') { - throw new TypeError('length must be a number'); - } - if (!utils_1.guard.isStringRecord(hashes)) { - throw new TypeError('hashes must have string keys 
and values'); - } - return new TargetFile({ - length, - path, - hashes, - unrecognizedFields: rest, - }); - } -} -exports.TargetFile = TargetFile; -// Check that supplied length if valid -function validateLength(length) { - if (length < 0) { - throw new error_1.ValueError('Length must be at least 0'); - } -} diff --git a/node_modules/@tufjs/models/dist/index.js b/node_modules/@tufjs/models/dist/index.js deleted file mode 100644 index a4dc783659f04..0000000000000 --- a/node_modules/@tufjs/models/dist/index.js +++ /dev/null @@ -1,24 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Timestamp = exports.Targets = exports.Snapshot = exports.Signature = exports.Root = exports.Metadata = exports.Key = exports.TargetFile = exports.MetaFile = exports.ValueError = exports.MetadataKind = void 0; -var base_1 = require("./base"); -Object.defineProperty(exports, "MetadataKind", { enumerable: true, get: function () { return base_1.MetadataKind; } }); -var error_1 = require("./error"); -Object.defineProperty(exports, "ValueError", { enumerable: true, get: function () { return error_1.ValueError; } }); -var file_1 = require("./file"); -Object.defineProperty(exports, "MetaFile", { enumerable: true, get: function () { return file_1.MetaFile; } }); -Object.defineProperty(exports, "TargetFile", { enumerable: true, get: function () { return file_1.TargetFile; } }); -var key_1 = require("./key"); -Object.defineProperty(exports, "Key", { enumerable: true, get: function () { return key_1.Key; } }); -var metadata_1 = require("./metadata"); -Object.defineProperty(exports, "Metadata", { enumerable: true, get: function () { return metadata_1.Metadata; } }); -var root_1 = require("./root"); -Object.defineProperty(exports, "Root", { enumerable: true, get: function () { return root_1.Root; } }); -var signature_1 = require("./signature"); -Object.defineProperty(exports, "Signature", { enumerable: true, get: function () { return signature_1.Signature; } }); -var snapshot_1 = require("./snapshot"); -Object.defineProperty(exports, "Snapshot", { enumerable: true, get: function () { return snapshot_1.Snapshot; } }); -var targets_1 = require("./targets"); -Object.defineProperty(exports, "Targets", { enumerable: true, get: function () { return targets_1.Targets; } }); -var timestamp_1 = require("./timestamp"); -Object.defineProperty(exports, "Timestamp", { enumerable: true, get: function () { return timestamp_1.Timestamp; } }); diff --git a/node_modules/@tufjs/models/dist/key.js b/node_modules/@tufjs/models/dist/key.js deleted file mode 100644 index 5e55b09d7c6dd..0000000000000 --- a/node_modules/@tufjs/models/dist/key.js +++ /dev/null @@ -1,85 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Key = void 0; -const util_1 = __importDefault(require("util")); -const error_1 = require("./error"); -const utils_1 = require("./utils"); -const key_1 = require("./utils/key"); -// A container class representing the public portion of a Key. 
-class Key { - constructor(options) { - const { keyID, keyType, scheme, keyVal, unrecognizedFields } = options; - this.keyID = keyID; - this.keyType = keyType; - this.scheme = scheme; - this.keyVal = keyVal; - this.unrecognizedFields = unrecognizedFields || {}; - } - // Verifies the that the metadata.signatures contains a signature made with - // this key and is correctly signed. - verifySignature(metadata) { - const signature = metadata.signatures[this.keyID]; - if (!signature) - throw new error_1.UnsignedMetadataError('no signature for key found in metadata'); - if (!this.keyVal.public) - throw new error_1.UnsignedMetadataError('no public key found'); - const publicKey = (0, key_1.getPublicKey)({ - keyType: this.keyType, - scheme: this.scheme, - keyVal: this.keyVal.public, - }); - const signedData = metadata.signed.toJSON(); - try { - if (!utils_1.crypto.verifySignature(signedData, publicKey, signature.sig)) { - throw new error_1.UnsignedMetadataError(`failed to verify ${this.keyID} signature`); - } - } - catch (error) { - if (error instanceof error_1.UnsignedMetadataError) { - throw error; - } - throw new error_1.UnsignedMetadataError(`failed to verify ${this.keyID} signature`); - } - } - equals(other) { - if (!(other instanceof Key)) { - return false; - } - return (this.keyID === other.keyID && - this.keyType === other.keyType && - this.scheme === other.scheme && - util_1.default.isDeepStrictEqual(this.keyVal, other.keyVal) && - util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); - } - toJSON() { - return { - keytype: this.keyType, - scheme: this.scheme, - keyval: this.keyVal, - ...this.unrecognizedFields, - }; - } - static fromJSON(keyID, data) { - const { keytype, scheme, keyval, ...rest } = data; - if (typeof keytype !== 'string') { - throw new TypeError('keytype must be a string'); - } - if (typeof scheme !== 'string') { - throw new TypeError('scheme must be a string'); - } - if (!utils_1.guard.isStringRecord(keyval)) { - throw new TypeError('keyval must be a string record'); - } - return new Key({ - keyID, - keyType: keytype, - scheme, - keyVal: keyval, - unrecognizedFields: rest, - }); - } -} -exports.Key = Key; diff --git a/node_modules/@tufjs/models/dist/metadata.js b/node_modules/@tufjs/models/dist/metadata.js deleted file mode 100644 index 9668b6f14fa70..0000000000000 --- a/node_modules/@tufjs/models/dist/metadata.js +++ /dev/null @@ -1,158 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Metadata = void 0; -const canonical_json_1 = require("@tufjs/canonical-json"); -const util_1 = __importDefault(require("util")); -const base_1 = require("./base"); -const error_1 = require("./error"); -const root_1 = require("./root"); -const signature_1 = require("./signature"); -const snapshot_1 = require("./snapshot"); -const targets_1 = require("./targets"); -const timestamp_1 = require("./timestamp"); -const utils_1 = require("./utils"); -/*** - * A container for signed TUF metadata. - * - * Provides methods to convert to and from json, read and write to and - * from JSON and to create and verify metadata signatures. - * - * ``Metadata[T]`` is a generic container type where T can be any one type of - * [``Root``, ``Timestamp``, ``Snapshot``, ``Targets``]. 
The purpose of this - * is to allow static type checking of the signed attribute in code using - * Metadata:: - * - * root_md = Metadata[Root].fromJSON("root.json") - * # root_md type is now Metadata[Root]. This means signed and its - * # attributes like consistent_snapshot are now statically typed and the - * # types can be verified by static type checkers and shown by IDEs - * - * Using a type constraint is not required but not doing so means T is not a - * specific type so static typing cannot happen. Note that the type constraint - * ``[Root]`` is not validated at runtime (as pure annotations are not available - * then). - * - * Apart from ``expires`` all of the arguments to the inner constructors have - * reasonable default values for new metadata. - */ -class Metadata { - constructor(signed, signatures, unrecognizedFields) { - this.signed = signed; - this.signatures = signatures || {}; - this.unrecognizedFields = unrecognizedFields || {}; - } - sign(signer, append = true) { - const bytes = Buffer.from((0, canonical_json_1.canonicalize)(this.signed.toJSON())); - const signature = signer(bytes); - if (!append) { - this.signatures = {}; - } - this.signatures[signature.keyID] = signature; - } - verifyDelegate(delegatedRole, delegatedMetadata) { - let role; - let keys = {}; - switch (this.signed.type) { - case base_1.MetadataKind.Root: - keys = this.signed.keys; - role = this.signed.roles[delegatedRole]; - break; - case base_1.MetadataKind.Targets: - if (!this.signed.delegations) { - throw new error_1.ValueError(`No delegations found for ${delegatedRole}`); - } - keys = this.signed.delegations.keys; - if (this.signed.delegations.roles) { - role = this.signed.delegations.roles[delegatedRole]; - } - else if (this.signed.delegations.succinctRoles) { - if (this.signed.delegations.succinctRoles.isDelegatedRole(delegatedRole)) { - role = this.signed.delegations.succinctRoles; - } - } - break; - default: - throw new TypeError('invalid metadata type'); - } - if (!role) { - throw new error_1.ValueError(`no delegation found for ${delegatedRole}`); - } - const signingKeys = new Set(); - role.keyIDs.forEach((keyID) => { - const key = keys[keyID]; - // If we dont' have the key, continue checking other keys - if (!key) { - return; - } - try { - key.verifySignature(delegatedMetadata); - signingKeys.add(key.keyID); - } - catch (error) { - // continue - } - }); - if (signingKeys.size < role.threshold) { - throw new error_1.UnsignedMetadataError(`${delegatedRole} was signed by ${signingKeys.size}/${role.threshold} keys`); - } - } - equals(other) { - if (!(other instanceof Metadata)) { - return false; - } - return (this.signed.equals(other.signed) && - util_1.default.isDeepStrictEqual(this.signatures, other.signatures) && - util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); - } - toJSON() { - const signatures = Object.values(this.signatures).map((signature) => { - return signature.toJSON(); - }); - return { - signatures, - signed: this.signed.toJSON(), - ...this.unrecognizedFields, - }; - } - static fromJSON(type, data) { - const { signed, signatures, ...rest } = data; - if (!utils_1.guard.isDefined(signed) || !utils_1.guard.isObject(signed)) { - throw new TypeError('signed is not defined'); - } - if (type !== signed._type) { - throw new error_1.ValueError(`expected '${type}', got ${signed['_type']}`); - } - let signedObj; - switch (type) { - case base_1.MetadataKind.Root: - signedObj = root_1.Root.fromJSON(signed); - break; - case base_1.MetadataKind.Timestamp: - 
signedObj = timestamp_1.Timestamp.fromJSON(signed); - break; - case base_1.MetadataKind.Snapshot: - signedObj = snapshot_1.Snapshot.fromJSON(signed); - break; - case base_1.MetadataKind.Targets: - signedObj = targets_1.Targets.fromJSON(signed); - break; - default: - throw new TypeError('invalid metadata type'); - } - const sigMap = signaturesFromJSON(signatures); - return new Metadata(signedObj, sigMap, rest); - } -} -exports.Metadata = Metadata; -function signaturesFromJSON(data) { - if (!utils_1.guard.isObjectArray(data)) { - throw new TypeError('signatures is not an array'); - } - return data.reduce((acc, sigData) => { - const signature = signature_1.Signature.fromJSON(sigData); - return { ...acc, [signature.keyID]: signature }; - }, {}); -} diff --git a/node_modules/@tufjs/models/dist/role.js b/node_modules/@tufjs/models/dist/role.js deleted file mode 100644 index f7ddbc6fe3f38..0000000000000 --- a/node_modules/@tufjs/models/dist/role.js +++ /dev/null @@ -1,299 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.SuccinctRoles = exports.DelegatedRole = exports.Role = exports.TOP_LEVEL_ROLE_NAMES = void 0; -const crypto_1 = __importDefault(require("crypto")); -const minimatch_1 = require("minimatch"); -const util_1 = __importDefault(require("util")); -const error_1 = require("./error"); -const utils_1 = require("./utils"); -exports.TOP_LEVEL_ROLE_NAMES = [ - 'root', - 'targets', - 'snapshot', - 'timestamp', -]; -/** - * Container that defines which keys are required to sign roles metadata. - * - * Role defines how many keys are required to successfully sign the roles - * metadata, and which keys are accepted. - */ -class Role { - constructor(options) { - const { keyIDs, threshold, unrecognizedFields } = options; - if (hasDuplicates(keyIDs)) { - throw new error_1.ValueError('duplicate key IDs found'); - } - if (threshold < 1) { - throw new error_1.ValueError('threshold must be at least 1'); - } - this.keyIDs = keyIDs; - this.threshold = threshold; - this.unrecognizedFields = unrecognizedFields || {}; - } - equals(other) { - if (!(other instanceof Role)) { - return false; - } - return (this.threshold === other.threshold && - util_1.default.isDeepStrictEqual(this.keyIDs, other.keyIDs) && - util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); - } - toJSON() { - return { - keyids: this.keyIDs, - threshold: this.threshold, - ...this.unrecognizedFields, - }; - } - static fromJSON(data) { - const { keyids, threshold, ...rest } = data; - if (!utils_1.guard.isStringArray(keyids)) { - throw new TypeError('keyids must be an array'); - } - if (typeof threshold !== 'number') { - throw new TypeError('threshold must be a number'); - } - return new Role({ - keyIDs: keyids, - threshold, - unrecognizedFields: rest, - }); - } -} -exports.Role = Role; -function hasDuplicates(array) { - return new Set(array).size !== array.length; -} -/** - * A container with information about a delegated role. - * - * A delegation can happen in two ways: - * - ``paths`` is set: delegates targets matching any path pattern in ``paths`` - * - ``pathHashPrefixes`` is set: delegates targets whose target path hash - * starts with any of the prefixes in ``pathHashPrefixes`` - * - * ``paths`` and ``pathHashPrefixes`` are mutually exclusive: both cannot be - * set, at least one of them must be set. 
- */ -class DelegatedRole extends Role { - constructor(opts) { - super(opts); - const { name, terminating, paths, pathHashPrefixes } = opts; - this.name = name; - this.terminating = terminating; - if (opts.paths && opts.pathHashPrefixes) { - throw new error_1.ValueError('paths and pathHashPrefixes are mutually exclusive'); - } - this.paths = paths; - this.pathHashPrefixes = pathHashPrefixes; - } - equals(other) { - if (!(other instanceof DelegatedRole)) { - return false; - } - return (super.equals(other) && - this.name === other.name && - this.terminating === other.terminating && - util_1.default.isDeepStrictEqual(this.paths, other.paths) && - util_1.default.isDeepStrictEqual(this.pathHashPrefixes, other.pathHashPrefixes)); - } - isDelegatedPath(targetFilepath) { - if (this.paths) { - return this.paths.some((pathPattern) => isTargetInPathPattern(targetFilepath, pathPattern)); - } - if (this.pathHashPrefixes) { - const hasher = crypto_1.default.createHash('sha256'); - const pathHash = hasher.update(targetFilepath).digest('hex'); - return this.pathHashPrefixes.some((pathHashPrefix) => pathHash.startsWith(pathHashPrefix)); - } - return false; - } - toJSON() { - const json = { - ...super.toJSON(), - name: this.name, - terminating: this.terminating, - }; - if (this.paths) { - json.paths = this.paths; - } - if (this.pathHashPrefixes) { - json.path_hash_prefixes = this.pathHashPrefixes; - } - return json; - } - static fromJSON(data) { - const { keyids, threshold, name, terminating, paths, path_hash_prefixes, ...rest } = data; - if (!utils_1.guard.isStringArray(keyids)) { - throw new TypeError('keyids must be an array of strings'); - } - if (typeof threshold !== 'number') { - throw new TypeError('threshold must be a number'); - } - if (typeof name !== 'string') { - throw new TypeError('name must be a string'); - } - if (typeof terminating !== 'boolean') { - throw new TypeError('terminating must be a boolean'); - } - if (utils_1.guard.isDefined(paths) && !utils_1.guard.isStringArray(paths)) { - throw new TypeError('paths must be an array of strings'); - } - if (utils_1.guard.isDefined(path_hash_prefixes) && - !utils_1.guard.isStringArray(path_hash_prefixes)) { - throw new TypeError('path_hash_prefixes must be an array of strings'); - } - return new DelegatedRole({ - keyIDs: keyids, - threshold, - name, - terminating, - paths, - pathHashPrefixes: path_hash_prefixes, - unrecognizedFields: rest, - }); - } -} -exports.DelegatedRole = DelegatedRole; -// JS version of Ruby's Array#zip -const zip = (a, b) => a.map((k, i) => [k, b[i]]); -function isTargetInPathPattern(target, pattern) { - const targetParts = target.split('/'); - const patternParts = pattern.split('/'); - if (patternParts.length != targetParts.length) { - return false; - } - return zip(targetParts, patternParts).every(([targetPart, patternPart]) => (0, minimatch_1.minimatch)(targetPart, patternPart)); -} -/** - * Succinctly defines a hash bin delegation graph. - * - * A ``SuccinctRoles`` object describes a delegation graph that covers all - * targets, distributing them uniformly over the delegated roles (i.e. bins) - * in the graph. - * - * The total number of bins is 2 to the power of the passed ``bit_length``. - * - * Bin names are the concatenation of the passed ``name_prefix`` and a - * zero-padded hex representation of the bin index separated by a hyphen. - * - * The passed ``keyids`` and ``threshold`` is used for each bin, and each bin - * is 'terminating'. 
- * - * For details: https://github.com/theupdateframework/taps/blob/master/tap15.md - */ -class SuccinctRoles extends Role { - constructor(opts) { - super(opts); - const { bitLength, namePrefix } = opts; - if (bitLength <= 0 || bitLength > 32) { - throw new error_1.ValueError('bitLength must be between 1 and 32'); - } - this.bitLength = bitLength; - this.namePrefix = namePrefix; - // Calculate the suffix_len value based on the total number of bins in - // hex. If bit_length = 10 then number_of_bins = 1024 or bin names will - // have a suffix between "000" and "3ff" in hex and suffix_len will be 3 - // meaning the third bin will have a suffix of "003". - this.numberOfBins = Math.pow(2, bitLength); - // suffix_len is calculated based on "number_of_bins - 1" as the name - // of the last bin contains the number "number_of_bins -1" as a suffix. - this.suffixLen = (this.numberOfBins - 1).toString(16).length; - } - equals(other) { - if (!(other instanceof SuccinctRoles)) { - return false; - } - return (super.equals(other) && - this.bitLength === other.bitLength && - this.namePrefix === other.namePrefix); - } - /*** - * Calculates the name of the delegated role responsible for 'target_filepath'. - * - * The target at path ''target_filepath' is assigned to a bin by casting - * the left-most 'bit_length' of bits of the file path hash digest to - * int, using it as bin index between 0 and '2**bit_length - 1'. - * - * Args: - * target_filepath: URL path to a target file, relative to a base - * targets URL. - */ - getRoleForTarget(targetFilepath) { - const hasher = crypto_1.default.createHash('sha256'); - const hasherBuffer = hasher.update(targetFilepath).digest(); - // can't ever need more than 4 bytes (32 bits). - const hashBytes = hasherBuffer.subarray(0, 4); - // Right shift hash bytes, so that we only have the leftmost - // bit_length bits that we care about. - const shiftValue = 32 - this.bitLength; - const binNumber = hashBytes.readUInt32BE() >>> shiftValue; - // Add zero padding if necessary and cast to hex the suffix. - const suffix = binNumber.toString(16).padStart(this.suffixLen, '0'); - return `${this.namePrefix}-${suffix}`; - } - *getRoles() { - for (let i = 0; i < this.numberOfBins; i++) { - const suffix = i.toString(16).padStart(this.suffixLen, '0'); - yield `${this.namePrefix}-${suffix}`; - } - } - /*** - * Determines whether the given ``role_name`` is in one of - * the delegated roles that ``SuccinctRoles`` represents. - * - * Args: - * role_name: The name of the role to check against. 
- */ - isDelegatedRole(roleName) { - const desiredPrefix = this.namePrefix + '-'; - if (!roleName.startsWith(desiredPrefix)) { - return false; - } - const suffix = roleName.slice(desiredPrefix.length, roleName.length); - if (suffix.length != this.suffixLen) { - return false; - } - // make sure the suffix is a hex string - if (!suffix.match(/^[0-9a-fA-F]+$/)) { - return false; - } - const num = parseInt(suffix, 16); - return 0 <= num && num < this.numberOfBins; - } - toJSON() { - const json = { - ...super.toJSON(), - bit_length: this.bitLength, - name_prefix: this.namePrefix, - }; - return json; - } - static fromJSON(data) { - const { keyids, threshold, bit_length, name_prefix, ...rest } = data; - if (!utils_1.guard.isStringArray(keyids)) { - throw new TypeError('keyids must be an array of strings'); - } - if (typeof threshold !== 'number') { - throw new TypeError('threshold must be a number'); - } - if (typeof bit_length !== 'number') { - throw new TypeError('bit_length must be a number'); - } - if (typeof name_prefix !== 'string') { - throw new TypeError('name_prefix must be a string'); - } - return new SuccinctRoles({ - keyIDs: keyids, - threshold, - bitLength: bit_length, - namePrefix: name_prefix, - unrecognizedFields: rest, - }); - } -} -exports.SuccinctRoles = SuccinctRoles; diff --git a/node_modules/@tufjs/models/dist/root.js b/node_modules/@tufjs/models/dist/root.js deleted file mode 100644 index 36d0ef0f186d1..0000000000000 --- a/node_modules/@tufjs/models/dist/root.js +++ /dev/null @@ -1,116 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Root = void 0; -const util_1 = __importDefault(require("util")); -const base_1 = require("./base"); -const error_1 = require("./error"); -const key_1 = require("./key"); -const role_1 = require("./role"); -const utils_1 = require("./utils"); -/** - * A container for the signed part of root metadata. - * - * The top-level role and metadata file signed by the root keys. - * This role specifies trusted keys for all other top-level roles, which may further delegate trust. - */ -class Root extends base_1.Signed { - constructor(options) { - super(options); - this.type = base_1.MetadataKind.Root; - this.keys = options.keys || {}; - this.consistentSnapshot = options.consistentSnapshot ?? 
true; - if (!options.roles) { - this.roles = role_1.TOP_LEVEL_ROLE_NAMES.reduce((acc, role) => ({ - ...acc, - [role]: new role_1.Role({ keyIDs: [], threshold: 1 }), - }), {}); - } - else { - const roleNames = new Set(Object.keys(options.roles)); - if (!role_1.TOP_LEVEL_ROLE_NAMES.every((role) => roleNames.has(role))) { - throw new error_1.ValueError('missing top-level role'); - } - this.roles = options.roles; - } - } - addKey(key, role) { - if (!this.roles[role]) { - throw new error_1.ValueError(`role ${role} does not exist`); - } - if (!this.roles[role].keyIDs.includes(key.keyID)) { - this.roles[role].keyIDs.push(key.keyID); - } - this.keys[key.keyID] = key; - } - equals(other) { - if (!(other instanceof Root)) { - return false; - } - return (super.equals(other) && - this.consistentSnapshot === other.consistentSnapshot && - util_1.default.isDeepStrictEqual(this.keys, other.keys) && - util_1.default.isDeepStrictEqual(this.roles, other.roles)); - } - toJSON() { - return { - _type: this.type, - spec_version: this.specVersion, - version: this.version, - expires: this.expires, - keys: keysToJSON(this.keys), - roles: rolesToJSON(this.roles), - consistent_snapshot: this.consistentSnapshot, - ...this.unrecognizedFields, - }; - } - static fromJSON(data) { - const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data); - const { keys, roles, consistent_snapshot, ...rest } = unrecognizedFields; - if (typeof consistent_snapshot !== 'boolean') { - throw new TypeError('consistent_snapshot must be a boolean'); - } - return new Root({ - ...commonFields, - keys: keysFromJSON(keys), - roles: rolesFromJSON(roles), - consistentSnapshot: consistent_snapshot, - unrecognizedFields: rest, - }); - } -} -exports.Root = Root; -function keysToJSON(keys) { - return Object.entries(keys).reduce((acc, [keyID, key]) => ({ ...acc, [keyID]: key.toJSON() }), {}); -} -function rolesToJSON(roles) { - return Object.entries(roles).reduce((acc, [roleName, role]) => ({ ...acc, [roleName]: role.toJSON() }), {}); -} -function keysFromJSON(data) { - let keys; - if (utils_1.guard.isDefined(data)) { - if (!utils_1.guard.isObjectRecord(data)) { - throw new TypeError('keys must be an object'); - } - keys = Object.entries(data).reduce((acc, [keyID, keyData]) => ({ - ...acc, - [keyID]: key_1.Key.fromJSON(keyID, keyData), - }), {}); - } - return keys; -} -function rolesFromJSON(data) { - let roles; - if (utils_1.guard.isDefined(data)) { - if (!utils_1.guard.isObjectRecord(data)) { - throw new TypeError('roles must be an object'); - } - roles = Object.entries(data).reduce((acc, [roleName, roleData]) => ({ - ...acc, - [roleName]: role_1.Role.fromJSON(roleData), - }), {}); - } - return roles; -} diff --git a/node_modules/@tufjs/models/dist/signature.js b/node_modules/@tufjs/models/dist/signature.js deleted file mode 100644 index 33eb204eb0835..0000000000000 --- a/node_modules/@tufjs/models/dist/signature.js +++ /dev/null @@ -1,38 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Signature = void 0; -/** - * A container class containing information about a signature. - * - * Contains a signature and the keyid uniquely identifying the key used - * to generate the signature. - * - * Provide a `fromJSON` method to create a Signature from a JSON object. 
- */ -class Signature { - constructor(options) { - const { keyID, sig } = options; - this.keyID = keyID; - this.sig = sig; - } - toJSON() { - return { - keyid: this.keyID, - sig: this.sig, - }; - } - static fromJSON(data) { - const { keyid, sig } = data; - if (typeof keyid !== 'string') { - throw new TypeError('keyid must be a string'); - } - if (typeof sig !== 'string') { - throw new TypeError('sig must be a string'); - } - return new Signature({ - keyID: keyid, - sig: sig, - }); - } -} -exports.Signature = Signature; diff --git a/node_modules/@tufjs/models/dist/snapshot.js b/node_modules/@tufjs/models/dist/snapshot.js deleted file mode 100644 index e90ea8e729e4e..0000000000000 --- a/node_modules/@tufjs/models/dist/snapshot.js +++ /dev/null @@ -1,71 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Snapshot = void 0; -const util_1 = __importDefault(require("util")); -const base_1 = require("./base"); -const file_1 = require("./file"); -const utils_1 = require("./utils"); -/** - * A container for the signed part of snapshot metadata. - * - * Snapshot contains information about all target Metadata files. - * A top-level role that specifies the latest versions of all targets metadata files, - * and hence the latest versions of all targets (including any dependencies between them) on the repository. - */ -class Snapshot extends base_1.Signed { - constructor(opts) { - super(opts); - this.type = base_1.MetadataKind.Snapshot; - this.meta = opts.meta || { 'targets.json': new file_1.MetaFile({ version: 1 }) }; - } - equals(other) { - if (!(other instanceof Snapshot)) { - return false; - } - return super.equals(other) && util_1.default.isDeepStrictEqual(this.meta, other.meta); - } - toJSON() { - return { - _type: this.type, - meta: metaToJSON(this.meta), - spec_version: this.specVersion, - version: this.version, - expires: this.expires, - ...this.unrecognizedFields, - }; - } - static fromJSON(data) { - const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data); - const { meta, ...rest } = unrecognizedFields; - return new Snapshot({ - ...commonFields, - meta: metaFromJSON(meta), - unrecognizedFields: rest, - }); - } -} -exports.Snapshot = Snapshot; -function metaToJSON(meta) { - return Object.entries(meta).reduce((acc, [path, metadata]) => ({ - ...acc, - [path]: metadata.toJSON(), - }), {}); -} -function metaFromJSON(data) { - let meta; - if (utils_1.guard.isDefined(data)) { - if (!utils_1.guard.isObjectRecord(data)) { - throw new TypeError('meta field is malformed'); - } - else { - meta = Object.entries(data).reduce((acc, [path, metadata]) => ({ - ...acc, - [path]: file_1.MetaFile.fromJSON(metadata), - }), {}); - } - } - return meta; -} diff --git a/node_modules/@tufjs/models/dist/targets.js b/node_modules/@tufjs/models/dist/targets.js deleted file mode 100644 index 54bd8f8c554af..0000000000000 --- a/node_modules/@tufjs/models/dist/targets.js +++ /dev/null @@ -1,92 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Targets = void 0; -const util_1 = __importDefault(require("util")); -const base_1 = require("./base"); -const delegations_1 = require("./delegations"); -const file_1 = require("./file"); -const utils_1 = require("./utils"); -// Container for the signed part of targets metadata. -// -// Targets contains verifying information about target files and also delegates -// responsible to other Targets roles. -class Targets extends base_1.Signed { - constructor(options) { - super(options); - this.type = base_1.MetadataKind.Targets; - this.targets = options.targets || {}; - this.delegations = options.delegations; - } - addTarget(target) { - this.targets[target.path] = target; - } - equals(other) { - if (!(other instanceof Targets)) { - return false; - } - return (super.equals(other) && - util_1.default.isDeepStrictEqual(this.targets, other.targets) && - util_1.default.isDeepStrictEqual(this.delegations, other.delegations)); - } - toJSON() { - const json = { - _type: this.type, - spec_version: this.specVersion, - version: this.version, - expires: this.expires, - targets: targetsToJSON(this.targets), - ...this.unrecognizedFields, - }; - if (this.delegations) { - json.delegations = this.delegations.toJSON(); - } - return json; - } - static fromJSON(data) { - const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data); - const { targets, delegations, ...rest } = unrecognizedFields; - return new Targets({ - ...commonFields, - targets: targetsFromJSON(targets), - delegations: delegationsFromJSON(delegations), - unrecognizedFields: rest, - }); - } -} -exports.Targets = Targets; -function targetsToJSON(targets) { - return Object.entries(targets).reduce((acc, [path, target]) => ({ - ...acc, - [path]: target.toJSON(), - }), {}); -} -function targetsFromJSON(data) { - let targets; - if (utils_1.guard.isDefined(data)) { - if (!utils_1.guard.isObjectRecord(data)) { - throw new TypeError('targets must be an object'); - } - else { - targets = Object.entries(data).reduce((acc, [path, target]) => ({ - ...acc, - [path]: file_1.TargetFile.fromJSON(path, target), - }), {}); - } - } - return targets; -} -function delegationsFromJSON(data) { - let delegations; - if (utils_1.guard.isDefined(data)) { - if (!utils_1.guard.isObject(data)) { - throw new TypeError('delegations must be an object'); - } - else { - delegations = delegations_1.Delegations.fromJSON(data); - } - } - return delegations; -} diff --git a/node_modules/@tufjs/models/dist/timestamp.js b/node_modules/@tufjs/models/dist/timestamp.js deleted file mode 100644 index 9880c4c9fc254..0000000000000 --- a/node_modules/@tufjs/models/dist/timestamp.js +++ /dev/null @@ -1,58 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Timestamp = void 0; -const base_1 = require("./base"); -const file_1 = require("./file"); -const utils_1 = require("./utils"); -/** - * A container for the signed part of timestamp metadata. - * - * A top-level that specifies the latest version of the snapshot role metadata file, - * and hence the latest versions of all metadata and targets on the repository. 
- */ -class Timestamp extends base_1.Signed { - constructor(options) { - super(options); - this.type = base_1.MetadataKind.Timestamp; - this.snapshotMeta = options.snapshotMeta || new file_1.MetaFile({ version: 1 }); - } - equals(other) { - if (!(other instanceof Timestamp)) { - return false; - } - return super.equals(other) && this.snapshotMeta.equals(other.snapshotMeta); - } - toJSON() { - return { - _type: this.type, - spec_version: this.specVersion, - version: this.version, - expires: this.expires, - meta: { 'snapshot.json': this.snapshotMeta.toJSON() }, - ...this.unrecognizedFields, - }; - } - static fromJSON(data) { - const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data); - const { meta, ...rest } = unrecognizedFields; - return new Timestamp({ - ...commonFields, - snapshotMeta: snapshotMetaFromJSON(meta), - unrecognizedFields: rest, - }); - } -} -exports.Timestamp = Timestamp; -function snapshotMetaFromJSON(data) { - let snapshotMeta; - if (utils_1.guard.isDefined(data)) { - const snapshotData = data['snapshot.json']; - if (!utils_1.guard.isDefined(snapshotData) || !utils_1.guard.isObject(snapshotData)) { - throw new TypeError('missing snapshot.json in meta'); - } - else { - snapshotMeta = file_1.MetaFile.fromJSON(snapshotData); - } - } - return snapshotMeta; -} diff --git a/node_modules/@tufjs/models/dist/utils/guard.js b/node_modules/@tufjs/models/dist/utils/guard.js deleted file mode 100644 index efe558852303c..0000000000000 --- a/node_modules/@tufjs/models/dist/utils/guard.js +++ /dev/null @@ -1,33 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.isObjectRecord = exports.isStringRecord = exports.isObjectArray = exports.isStringArray = exports.isObject = exports.isDefined = void 0; -function isDefined(val) { - return val !== undefined; -} -exports.isDefined = isDefined; -function isObject(value) { - return typeof value === 'object' && value !== null; -} -exports.isObject = isObject; -function isStringArray(value) { - return Array.isArray(value) && value.every((v) => typeof v === 'string'); -} -exports.isStringArray = isStringArray; -function isObjectArray(value) { - return Array.isArray(value) && value.every(isObject); -} -exports.isObjectArray = isObjectArray; -function isStringRecord(value) { - return (typeof value === 'object' && - value !== null && - Object.keys(value).every((k) => typeof k === 'string') && - Object.values(value).every((v) => typeof v === 'string')); -} -exports.isStringRecord = isStringRecord; -function isObjectRecord(value) { - return (typeof value === 'object' && - value !== null && - Object.keys(value).every((k) => typeof k === 'string') && - Object.values(value).every((v) => typeof v === 'object' && v !== null)); -} -exports.isObjectRecord = isObjectRecord; diff --git a/node_modules/@tufjs/models/dist/utils/index.js b/node_modules/@tufjs/models/dist/utils/index.js deleted file mode 100644 index 872aae28049c9..0000000000000 --- a/node_modules/@tufjs/models/dist/utils/index.js +++ /dev/null @@ -1,28 +0,0 @@ -"use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.crypto = exports.guard = void 0; -exports.guard = __importStar(require("./guard")); -exports.crypto = __importStar(require("./verify")); diff --git a/node_modules/@tufjs/models/dist/utils/key.js b/node_modules/@tufjs/models/dist/utils/key.js deleted file mode 100644 index 1f795ba1a2733..0000000000000 --- a/node_modules/@tufjs/models/dist/utils/key.js +++ /dev/null @@ -1,143 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.getPublicKey = void 0; -const crypto_1 = __importDefault(require("crypto")); -const error_1 = require("../error"); -const oid_1 = require("./oid"); -const ASN1_TAG_SEQUENCE = 0x30; -const ANS1_TAG_BIT_STRING = 0x03; -const NULL_BYTE = 0x00; -const OID_EDDSA = '1.3.101.112'; -const OID_EC_PUBLIC_KEY = '1.2.840.10045.2.1'; -const OID_EC_CURVE_P256V1 = '1.2.840.10045.3.1.7'; -const PEM_HEADER = '-----BEGIN PUBLIC KEY-----'; -function getPublicKey(keyInfo) { - switch (keyInfo.keyType) { - case 'rsa': - return getRSAPublicKey(keyInfo); - case 'ed25519': - return getED25519PublicKey(keyInfo); - case 'ecdsa': - case 'ecdsa-sha2-nistp256': - case 'ecdsa-sha2-nistp384': - return getECDCSAPublicKey(keyInfo); - default: - throw new error_1.UnsupportedAlgorithmError(`Unsupported key type: ${keyInfo.keyType}`); - } -} -exports.getPublicKey = getPublicKey; -function getRSAPublicKey(keyInfo) { - // Only support PEM-encoded RSA keys - if (!keyInfo.keyVal.startsWith(PEM_HEADER)) { - throw new error_1.CryptoError('Invalid key format'); - } - const key = crypto_1.default.createPublicKey(keyInfo.keyVal); - switch (keyInfo.scheme) { - case 'rsassa-pss-sha256': - return { - key: key, - padding: crypto_1.default.constants.RSA_PKCS1_PSS_PADDING, - }; - default: - throw new error_1.UnsupportedAlgorithmError(`Unsupported RSA scheme: ${keyInfo.scheme}`); - } -} -function getED25519PublicKey(keyInfo) { - let key; - // If key is already PEM-encoded we can just parse it - if (keyInfo.keyVal.startsWith(PEM_HEADER)) { - key = crypto_1.default.createPublicKey(keyInfo.keyVal); - } - else { - // If key is not PEM-encoded it had better be hex - if (!isHex(keyInfo.keyVal)) { - throw new error_1.CryptoError('Invalid key format'); - } - key = crypto_1.default.createPublicKey({ - key: ed25519.hexToDER(keyInfo.keyVal), - format: 'der', - type: 'spki', - }); - } - return { key }; -} -function getECDCSAPublicKey(keyInfo) { - let key; - // If key is already PEM-encoded we can just parse it - if (keyInfo.keyVal.startsWith(PEM_HEADER)) { - key = 
crypto_1.default.createPublicKey(keyInfo.keyVal); - } - else { - // If key is not PEM-encoded it had better be hex - if (!isHex(keyInfo.keyVal)) { - throw new error_1.CryptoError('Invalid key format'); - } - key = crypto_1.default.createPublicKey({ - key: ecdsa.hexToDER(keyInfo.keyVal), - format: 'der', - type: 'spki', - }); - } - return { key }; -} -const ed25519 = { - // Translates a hex key into a crypto KeyObject - // https://keygen.sh/blog/how-to-use-hexadecimal-ed25519-keys-in-node/ - hexToDER: (hex) => { - const key = Buffer.from(hex, 'hex'); - const oid = (0, oid_1.encodeOIDString)(OID_EDDSA); - // Create a byte sequence containing the OID and key - const elements = Buffer.concat([ - Buffer.concat([ - Buffer.from([ASN1_TAG_SEQUENCE]), - Buffer.from([oid.length]), - oid, - ]), - Buffer.concat([ - Buffer.from([ANS1_TAG_BIT_STRING]), - Buffer.from([key.length + 1]), - Buffer.from([NULL_BYTE]), - key, - ]), - ]); - // Wrap up by creating a sequence of elements - const der = Buffer.concat([ - Buffer.from([ASN1_TAG_SEQUENCE]), - Buffer.from([elements.length]), - elements, - ]); - return der; - }, -}; -const ecdsa = { - hexToDER: (hex) => { - const key = Buffer.from(hex, 'hex'); - const bitString = Buffer.concat([ - Buffer.from([ANS1_TAG_BIT_STRING]), - Buffer.from([key.length + 1]), - Buffer.from([NULL_BYTE]), - key, - ]); - const oids = Buffer.concat([ - (0, oid_1.encodeOIDString)(OID_EC_PUBLIC_KEY), - (0, oid_1.encodeOIDString)(OID_EC_CURVE_P256V1), - ]); - const oidSequence = Buffer.concat([ - Buffer.from([ASN1_TAG_SEQUENCE]), - Buffer.from([oids.length]), - oids, - ]); - // Wrap up by creating a sequence of elements - const der = Buffer.concat([ - Buffer.from([ASN1_TAG_SEQUENCE]), - Buffer.from([oidSequence.length + bitString.length]), - oidSequence, - bitString, - ]); - return der; - }, -}; -const isHex = (key) => /^[0-9a-fA-F]+$/.test(key); diff --git a/node_modules/@tufjs/models/dist/utils/oid.js b/node_modules/@tufjs/models/dist/utils/oid.js deleted file mode 100644 index e1bb7af5e54fb..0000000000000 --- a/node_modules/@tufjs/models/dist/utils/oid.js +++ /dev/null @@ -1,27 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.encodeOIDString = void 0; -const ANS1_TAG_OID = 0x06; -function encodeOIDString(oid) { - const parts = oid.split('.'); - // The first two subidentifiers are encoded into the first byte - const first = parseInt(parts[0], 10) * 40 + parseInt(parts[1], 10); - const rest = []; - parts.slice(2).forEach((part) => { - const bytes = encodeVariableLengthInteger(parseInt(part, 10)); - rest.push(...bytes); - }); - const der = Buffer.from([first, ...rest]); - return Buffer.from([ANS1_TAG_OID, der.length, ...der]); -} -exports.encodeOIDString = encodeOIDString; -function encodeVariableLengthInteger(value) { - const bytes = []; - let mask = 0x00; - while (value > 0) { - bytes.unshift((value & 0x7f) | mask); - value >>= 7; - mask = 0x80; - } - return bytes; -} diff --git a/node_modules/@tufjs/models/dist/utils/types.js b/node_modules/@tufjs/models/dist/utils/types.js deleted file mode 100644 index c8ad2e549bdc6..0000000000000 --- a/node_modules/@tufjs/models/dist/utils/types.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@tufjs/models/dist/utils/verify.js b/node_modules/@tufjs/models/dist/utils/verify.js deleted file mode 100644 index 8232b6f6a97ab..0000000000000 --- a/node_modules/@tufjs/models/dist/utils/verify.js +++ /dev/null @@ -1,13 
+0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.verifySignature = void 0; -const canonical_json_1 = require("@tufjs/canonical-json"); -const crypto_1 = __importDefault(require("crypto")); -const verifySignature = (metaDataSignedData, key, signature) => { - const canonicalData = Buffer.from((0, canonical_json_1.canonicalize)(metaDataSignedData)); - return crypto_1.default.verify(undefined, canonicalData, key, Buffer.from(signature, 'hex')); -}; -exports.verifySignature = verifySignature; diff --git a/node_modules/@tufjs/models/package.json b/node_modules/@tufjs/models/package.json deleted file mode 100644 index be581591a0f3a..0000000000000 --- a/node_modules/@tufjs/models/package.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "name": "@tufjs/models", - "version": "2.0.1", - "description": "TUF metadata models", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "files": [ - "dist" - ], - "scripts": { - "build": "tsc --build", - "clean": "rm -rf dist && rm tsconfig.tsbuildinfo", - "test": "jest" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/theupdateframework/tuf-js.git" - }, - "keywords": [ - "tuf", - "security", - "update" - ], - "author": "bdehamer@github.com", - "license": "MIT", - "bugs": { - "url": "https://github.com/theupdateframework/tuf-js/issues" - }, - "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/models#readme", - "dependencies": { - "@tufjs/canonical-json": "2.0.0", - "minimatch": "^9.0.4" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } -} diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/LICENSE b/node_modules/pacote/node_modules/@sigstore/bundle/LICENSE deleted file mode 100644 index e9e7c1679a09d..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/bundle/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2023 The Sigstore Authors - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/dist/build.js b/node_modules/pacote/node_modules/@sigstore/bundle/dist/build.js deleted file mode 100644 index ade736407554c..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/bundle/dist/build.js +++ /dev/null @@ -1,100 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.toMessageSignatureBundle = toMessageSignatureBundle; -exports.toDSSEBundle = toDSSEBundle; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-See the License for the specific language governing permissions and -limitations under the License. -*/ -const protobuf_specs_1 = require("@sigstore/protobuf-specs"); -const bundle_1 = require("./bundle"); -// Message signature bundle - $case: 'messageSignature' -function toMessageSignatureBundle(options) { - return { - mediaType: options.certificateChain - ? bundle_1.BUNDLE_V02_MEDIA_TYPE - : bundle_1.BUNDLE_V03_MEDIA_TYPE, - content: { - $case: 'messageSignature', - messageSignature: { - messageDigest: { - algorithm: protobuf_specs_1.HashAlgorithm.SHA2_256, - digest: options.digest, - }, - signature: options.signature, - }, - }, - verificationMaterial: toVerificationMaterial(options), - }; -} -// DSSE envelope bundle - $case: 'dsseEnvelope' -function toDSSEBundle(options) { - return { - mediaType: options.certificateChain - ? bundle_1.BUNDLE_V02_MEDIA_TYPE - : bundle_1.BUNDLE_V03_MEDIA_TYPE, - content: { - $case: 'dsseEnvelope', - dsseEnvelope: toEnvelope(options), - }, - verificationMaterial: toVerificationMaterial(options), - }; -} -function toEnvelope(options) { - return { - payloadType: options.artifactType, - payload: options.artifact, - signatures: [toSignature(options)], - }; -} -function toSignature(options) { - return { - keyid: options.keyHint || '', - sig: options.signature, - }; -} -// Verification material -function toVerificationMaterial(options) { - return { - content: toKeyContent(options), - tlogEntries: [], - timestampVerificationData: { rfc3161Timestamps: [] }, - }; -} -function toKeyContent(options) { - if (options.certificate) { - if (options.certificateChain) { - return { - $case: 'x509CertificateChain', - x509CertificateChain: { - certificates: [{ rawBytes: options.certificate }], - }, - }; - } - else { - return { - $case: 'certificate', - certificate: { rawBytes: options.certificate }, - }; - } - } - else { - return { - $case: 'publicKey', - publicKey: { - hint: options.keyHint || '', - }, - }; - } -} diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/dist/bundle.js b/node_modules/pacote/node_modules/@sigstore/bundle/dist/bundle.js deleted file mode 100644 index eb67a0ddc17bb..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/bundle/dist/bundle.js +++ /dev/null @@ -1,24 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.BUNDLE_V03_MEDIA_TYPE = exports.BUNDLE_V03_LEGACY_MEDIA_TYPE = exports.BUNDLE_V02_MEDIA_TYPE = exports.BUNDLE_V01_MEDIA_TYPE = void 0; -exports.isBundleWithCertificateChain = isBundleWithCertificateChain; -exports.isBundleWithPublicKey = isBundleWithPublicKey; -exports.isBundleWithMessageSignature = isBundleWithMessageSignature; -exports.isBundleWithDsseEnvelope = isBundleWithDsseEnvelope; -exports.BUNDLE_V01_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.1'; -exports.BUNDLE_V02_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.2'; -exports.BUNDLE_V03_LEGACY_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.3'; -exports.BUNDLE_V03_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle.v0.3+json'; -// Type guards for bundle variants. 
-function isBundleWithCertificateChain(b) { - return b.verificationMaterial.content.$case === 'x509CertificateChain'; -} -function isBundleWithPublicKey(b) { - return b.verificationMaterial.content.$case === 'publicKey'; -} -function isBundleWithMessageSignature(b) { - return b.content.$case === 'messageSignature'; -} -function isBundleWithDsseEnvelope(b) { - return b.content.$case === 'dsseEnvelope'; -} diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/dist/error.js b/node_modules/pacote/node_modules/@sigstore/bundle/dist/error.js deleted file mode 100644 index f84295323b812..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/bundle/dist/error.js +++ /dev/null @@ -1,25 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.ValidationError = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -class ValidationError extends Error { - constructor(message, fields) { - super(message); - this.fields = fields; - } -} -exports.ValidationError = ValidationError; diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/dist/index.js b/node_modules/pacote/node_modules/@sigstore/bundle/dist/index.js deleted file mode 100644 index 1b012acad4d85..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/bundle/dist/index.js +++ /dev/null @@ -1,43 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.isBundleV01 = exports.assertBundleV02 = exports.assertBundleV01 = exports.assertBundleLatest = exports.assertBundle = exports.envelopeToJSON = exports.envelopeFromJSON = exports.bundleToJSON = exports.bundleFromJSON = exports.ValidationError = exports.isBundleWithPublicKey = exports.isBundleWithMessageSignature = exports.isBundleWithDsseEnvelope = exports.isBundleWithCertificateChain = exports.BUNDLE_V03_MEDIA_TYPE = exports.BUNDLE_V03_LEGACY_MEDIA_TYPE = exports.BUNDLE_V02_MEDIA_TYPE = exports.BUNDLE_V01_MEDIA_TYPE = exports.toMessageSignatureBundle = exports.toDSSEBundle = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -var build_1 = require("./build"); -Object.defineProperty(exports, "toDSSEBundle", { enumerable: true, get: function () { return build_1.toDSSEBundle; } }); -Object.defineProperty(exports, "toMessageSignatureBundle", { enumerable: true, get: function () { return build_1.toMessageSignatureBundle; } }); -var bundle_1 = require("./bundle"); -Object.defineProperty(exports, "BUNDLE_V01_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V01_MEDIA_TYPE; } }); -Object.defineProperty(exports, "BUNDLE_V02_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V02_MEDIA_TYPE; } }); -Object.defineProperty(exports, "BUNDLE_V03_LEGACY_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V03_LEGACY_MEDIA_TYPE; } }); -Object.defineProperty(exports, "BUNDLE_V03_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V03_MEDIA_TYPE; } }); -Object.defineProperty(exports, "isBundleWithCertificateChain", { enumerable: true, get: function () { return bundle_1.isBundleWithCertificateChain; } }); -Object.defineProperty(exports, "isBundleWithDsseEnvelope", { enumerable: true, get: function () { return bundle_1.isBundleWithDsseEnvelope; } }); -Object.defineProperty(exports, "isBundleWithMessageSignature", { enumerable: true, get: function () { return bundle_1.isBundleWithMessageSignature; } }); -Object.defineProperty(exports, "isBundleWithPublicKey", { enumerable: true, get: function () { return bundle_1.isBundleWithPublicKey; } }); -var error_1 = require("./error"); -Object.defineProperty(exports, "ValidationError", { enumerable: true, get: function () { return error_1.ValidationError; } }); -var serialized_1 = require("./serialized"); -Object.defineProperty(exports, "bundleFromJSON", { enumerable: true, get: function () { return serialized_1.bundleFromJSON; } }); -Object.defineProperty(exports, "bundleToJSON", { enumerable: true, get: function () { return serialized_1.bundleToJSON; } }); -Object.defineProperty(exports, "envelopeFromJSON", { enumerable: true, get: function () { return serialized_1.envelopeFromJSON; } }); -Object.defineProperty(exports, "envelopeToJSON", { enumerable: true, get: function () { return serialized_1.envelopeToJSON; } }); -var validate_1 = require("./validate"); -Object.defineProperty(exports, "assertBundle", { enumerable: true, get: function () { return validate_1.assertBundle; } }); -Object.defineProperty(exports, "assertBundleLatest", { enumerable: true, get: function () { return validate_1.assertBundleLatest; } }); -Object.defineProperty(exports, "assertBundleV01", { enumerable: true, get: function () { return validate_1.assertBundleV01; } }); -Object.defineProperty(exports, "assertBundleV02", { enumerable: true, get: function () { return validate_1.assertBundleV02; } }); -Object.defineProperty(exports, "isBundleV01", { enumerable: true, get: function () { return validate_1.isBundleV01; } }); diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/dist/serialized.js b/node_modules/pacote/node_modules/@sigstore/bundle/dist/serialized.js deleted file mode 100644 index be0d2a2d54d09..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/bundle/dist/serialized.js +++ /dev/null @@ -1,49 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.envelopeToJSON = exports.envelopeFromJSON = exports.bundleToJSON = exports.bundleFromJSON = void 0; -/* -Copyright 2023 The Sigstore Authors. 
- -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const protobuf_specs_1 = require("@sigstore/protobuf-specs"); -const bundle_1 = require("./bundle"); -const validate_1 = require("./validate"); -const bundleFromJSON = (obj) => { - const bundle = protobuf_specs_1.Bundle.fromJSON(obj); - switch (bundle.mediaType) { - case bundle_1.BUNDLE_V01_MEDIA_TYPE: - (0, validate_1.assertBundleV01)(bundle); - break; - case bundle_1.BUNDLE_V02_MEDIA_TYPE: - (0, validate_1.assertBundleV02)(bundle); - break; - default: - (0, validate_1.assertBundleLatest)(bundle); - break; - } - return bundle; -}; -exports.bundleFromJSON = bundleFromJSON; -const bundleToJSON = (bundle) => { - return protobuf_specs_1.Bundle.toJSON(bundle); -}; -exports.bundleToJSON = bundleToJSON; -const envelopeFromJSON = (obj) => { - return protobuf_specs_1.Envelope.fromJSON(obj); -}; -exports.envelopeFromJSON = envelopeFromJSON; -const envelopeToJSON = (envelope) => { - return protobuf_specs_1.Envelope.toJSON(envelope); -}; -exports.envelopeToJSON = envelopeToJSON; diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/dist/utility.js b/node_modules/pacote/node_modules/@sigstore/bundle/dist/utility.js deleted file mode 100644 index c8ad2e549bdc6..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/bundle/dist/utility.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/dist/validate.js b/node_modules/pacote/node_modules/@sigstore/bundle/dist/validate.js deleted file mode 100644 index 21b8b5ee293ba..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/bundle/dist/validate.js +++ /dev/null @@ -1,199 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.assertBundle = assertBundle; -exports.assertBundleV01 = assertBundleV01; -exports.isBundleV01 = isBundleV01; -exports.assertBundleV02 = assertBundleV02; -exports.assertBundleLatest = assertBundleLatest; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const error_1 = require("./error"); -// Performs basic validation of a Sigstore bundle to ensure that all required -// fields are populated. This is not a complete validation of the bundle, but -// rather a check that the bundle is in a valid state to be processed by the -// rest of the code. 
-function assertBundle(b) { - const invalidValues = validateBundleBase(b); - if (invalidValues.length > 0) { - throw new error_1.ValidationError('invalid bundle', invalidValues); - } -} -// Asserts that the given bundle conforms to the v0.1 bundle format. -function assertBundleV01(b) { - const invalidValues = []; - invalidValues.push(...validateBundleBase(b)); - invalidValues.push(...validateInclusionPromise(b)); - if (invalidValues.length > 0) { - throw new error_1.ValidationError('invalid v0.1 bundle', invalidValues); - } -} -// Type guard to determine if Bundle is a v0.1 bundle. -function isBundleV01(b) { - try { - assertBundleV01(b); - return true; - } - catch (e) { - return false; - } -} -// Asserts that the given bundle conforms to the v0.2 bundle format. -function assertBundleV02(b) { - const invalidValues = []; - invalidValues.push(...validateBundleBase(b)); - invalidValues.push(...validateInclusionProof(b)); - if (invalidValues.length > 0) { - throw new error_1.ValidationError('invalid v0.2 bundle', invalidValues); - } -} -// Asserts that the given bundle conforms to the newest (0.3) bundle format. -function assertBundleLatest(b) { - const invalidValues = []; - invalidValues.push(...validateBundleBase(b)); - invalidValues.push(...validateInclusionProof(b)); - invalidValues.push(...validateNoCertificateChain(b)); - if (invalidValues.length > 0) { - throw new error_1.ValidationError('invalid bundle', invalidValues); - } -} -function validateBundleBase(b) { - const invalidValues = []; - // Media type validation - if (b.mediaType === undefined || - (!b.mediaType.match(/^application\/vnd\.dev\.sigstore\.bundle\+json;version=\d\.\d/) && - !b.mediaType.match(/^application\/vnd\.dev\.sigstore\.bundle\.v\d\.\d\+json/))) { - invalidValues.push('mediaType'); - } - // Content-related validation - if (b.content === undefined) { - invalidValues.push('content'); - } - else { - switch (b.content.$case) { - case 'messageSignature': - if (b.content.messageSignature.messageDigest === undefined) { - invalidValues.push('content.messageSignature.messageDigest'); - } - else { - if (b.content.messageSignature.messageDigest.digest.length === 0) { - invalidValues.push('content.messageSignature.messageDigest.digest'); - } - } - if (b.content.messageSignature.signature.length === 0) { - invalidValues.push('content.messageSignature.signature'); - } - break; - case 'dsseEnvelope': - if (b.content.dsseEnvelope.payload.length === 0) { - invalidValues.push('content.dsseEnvelope.payload'); - } - if (b.content.dsseEnvelope.signatures.length !== 1) { - invalidValues.push('content.dsseEnvelope.signatures'); - } - else { - if (b.content.dsseEnvelope.signatures[0].sig.length === 0) { - invalidValues.push('content.dsseEnvelope.signatures[0].sig'); - } - } - break; - } - } - // Verification material-related validation - if (b.verificationMaterial === undefined) { - invalidValues.push('verificationMaterial'); - } - else { - if (b.verificationMaterial.content === undefined) { - invalidValues.push('verificationMaterial.content'); - } - else { - switch (b.verificationMaterial.content.$case) { - case 'x509CertificateChain': - if (b.verificationMaterial.content.x509CertificateChain.certificates - .length === 0) { - invalidValues.push('verificationMaterial.content.x509CertificateChain.certificates'); - } - b.verificationMaterial.content.x509CertificateChain.certificates.forEach((cert, i) => { - if (cert.rawBytes.length === 0) { - 
invalidValues.push(`verificationMaterial.content.x509CertificateChain.certificates[${i}].rawBytes`); - } - }); - break; - case 'certificate': - if (b.verificationMaterial.content.certificate.rawBytes.length === 0) { - invalidValues.push('verificationMaterial.content.certificate.rawBytes'); - } - break; - } - } - if (b.verificationMaterial.tlogEntries === undefined) { - invalidValues.push('verificationMaterial.tlogEntries'); - } - else { - if (b.verificationMaterial.tlogEntries.length > 0) { - b.verificationMaterial.tlogEntries.forEach((entry, i) => { - if (entry.logId === undefined) { - invalidValues.push(`verificationMaterial.tlogEntries[${i}].logId`); - } - if (entry.kindVersion === undefined) { - invalidValues.push(`verificationMaterial.tlogEntries[${i}].kindVersion`); - } - }); - } - } - } - return invalidValues; -} -// Necessary for V01 bundles -function validateInclusionPromise(b) { - const invalidValues = []; - if (b.verificationMaterial && - b.verificationMaterial.tlogEntries?.length > 0) { - b.verificationMaterial.tlogEntries.forEach((entry, i) => { - if (entry.inclusionPromise === undefined) { - invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionPromise`); - } - }); - } - return invalidValues; -} -// Necessary for V02 and later bundles -function validateInclusionProof(b) { - const invalidValues = []; - if (b.verificationMaterial && - b.verificationMaterial.tlogEntries?.length > 0) { - b.verificationMaterial.tlogEntries.forEach((entry, i) => { - if (entry.inclusionProof === undefined) { - invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionProof`); - } - else { - if (entry.inclusionProof.checkpoint === undefined) { - invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionProof.checkpoint`); - } - } - }); - } - return invalidValues; -} -// Necessary for V03 and later bundles -function validateNoCertificateChain(b) { - const invalidValues = []; - /* istanbul ignore next */ - if (b.verificationMaterial?.content?.$case === 'x509CertificateChain') { - invalidValues.push('verificationMaterial.content.$case'); - } - return invalidValues; -} diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/package.json b/node_modules/pacote/node_modules/@sigstore/bundle/package.json deleted file mode 100644 index ee5d2b92b801a..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/bundle/package.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "name": "@sigstore/bundle", - "version": "3.0.0", - "description": "Sigstore bundle type", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "scripts": { - "clean": "shx rm -rf dist *.tsbuildinfo", - "build": "tsc --build", - "test": "jest" - }, - "files": [ - "dist", - "store" - ], - "author": "bdehamer@github.com", - "license": "Apache-2.0", - "repository": { - "type": "git", - "url": "git+https://github.com/sigstore/sigstore-js.git" - }, - "bugs": { - "url": "https://github.com/sigstore/sigstore-js/issues" - }, - "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/bundle#readme", - "publishConfig": { - "provenance": true - }, - "dependencies": { - "@sigstore/protobuf-specs": "^0.3.2" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } -} diff --git a/node_modules/pacote/node_modules/@sigstore/core/LICENSE b/node_modules/pacote/node_modules/@sigstore/core/LICENSE deleted file mode 100644 index e9e7c1679a09d..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/core/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - 
http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. 
Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2023 The Sigstore Authors - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. 
- You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/error.js b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/error.js deleted file mode 100644 index 17d93b0f7e706..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/error.js +++ /dev/null @@ -1,24 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.ASN1TypeError = exports.ASN1ParseError = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -class ASN1ParseError extends Error { -} -exports.ASN1ParseError = ASN1ParseError; -class ASN1TypeError extends Error { -} -exports.ASN1TypeError = ASN1TypeError; diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/index.js b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/index.js deleted file mode 100644 index 348b2ea4022e5..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/index.js +++ /dev/null @@ -1,20 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.ASN1Obj = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -var obj_1 = require("./obj"); -Object.defineProperty(exports, "ASN1Obj", { enumerable: true, get: function () { return obj_1.ASN1Obj; } }); diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/length.js b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/length.js deleted file mode 100644 index cb7ebf09dbefa..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/length.js +++ /dev/null @@ -1,62 +0,0 @@ -"use strict"; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-See the License for the specific language governing permissions and -limitations under the License. -*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.decodeLength = decodeLength; -exports.encodeLength = encodeLength; -const error_1 = require("./error"); -// Decodes the length of a DER-encoded ANS.1 element from the supplied stream. -// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-encoded-length-and-value-bytes -function decodeLength(stream) { - const buf = stream.getUint8(); - // If the most significant bit is UNSET the length is just the value of the - // byte. - if ((buf & 0x80) === 0x00) { - return buf; - } - // Otherwise, the lower 7 bits of the first byte indicate the number of bytes - // that follow to encode the length. - const byteCount = buf & 0x7f; - // Ensure the encoded length can safely fit in a JS number. - if (byteCount > 6) { - throw new error_1.ASN1ParseError('length exceeds 6 byte limit'); - } - // Iterate over the bytes that encode the length. - let len = 0; - for (let i = 0; i < byteCount; i++) { - len = len * 256 + stream.getUint8(); - } - // This is a valid ASN.1 length encoding, but we don't support it. - if (len === 0) { - throw new error_1.ASN1ParseError('indefinite length encoding not supported'); - } - return len; -} -// Translates the supplied value to a DER-encoded length. -function encodeLength(len) { - if (len < 128) { - return Buffer.from([len]); - } - // Bitwise operations on large numbers are not supported in JS, so we need to - // use BigInts. - let val = BigInt(len); - const bytes = []; - while (val > 0n) { - bytes.unshift(Number(val & 255n)); - val = val >> 8n; - } - return Buffer.from([0x80 | bytes.length, ...bytes]); -} diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/obj.js b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/obj.js deleted file mode 100644 index 5f9ac9cdbc493..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/obj.js +++ /dev/null @@ -1,152 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.ASN1Obj = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const stream_1 = require("../stream"); -const error_1 = require("./error"); -const length_1 = require("./length"); -const parse_1 = require("./parse"); -const tag_1 = require("./tag"); -class ASN1Obj { - constructor(tag, value, subs) { - this.tag = tag; - this.value = value; - this.subs = subs; - } - // Constructs an ASN.1 object from a Buffer of DER-encoded bytes. 
- static parseBuffer(buf) { - return parseStream(new stream_1.ByteStream(buf)); - } - toDER() { - const valueStream = new stream_1.ByteStream(); - if (this.subs.length > 0) { - for (const sub of this.subs) { - valueStream.appendView(sub.toDER()); - } - } - else { - valueStream.appendView(this.value); - } - const value = valueStream.buffer; - // Concat tag/length/value - const obj = new stream_1.ByteStream(); - obj.appendChar(this.tag.toDER()); - obj.appendView((0, length_1.encodeLength)(value.length)); - obj.appendView(value); - return obj.buffer; - } - ///////////////////////////////////////////////////////////////////////////// - // Convenience methods for parsing ASN.1 primitives into JS types - // Returns the ASN.1 object's value as a boolean. Throws an error if the - // object is not a boolean. - toBoolean() { - if (!this.tag.isBoolean()) { - throw new error_1.ASN1TypeError('not a boolean'); - } - return (0, parse_1.parseBoolean)(this.value); - } - // Returns the ASN.1 object's value as a BigInt. Throws an error if the - // object is not an integer. - toInteger() { - if (!this.tag.isInteger()) { - throw new error_1.ASN1TypeError('not an integer'); - } - return (0, parse_1.parseInteger)(this.value); - } - // Returns the ASN.1 object's value as an OID string. Throws an error if the - // object is not an OID. - toOID() { - if (!this.tag.isOID()) { - throw new error_1.ASN1TypeError('not an OID'); - } - return (0, parse_1.parseOID)(this.value); - } - // Returns the ASN.1 object's value as a Date. Throws an error if the object - // is not either a UTCTime or a GeneralizedTime. - toDate() { - switch (true) { - case this.tag.isUTCTime(): - return (0, parse_1.parseTime)(this.value, true); - case this.tag.isGeneralizedTime(): - return (0, parse_1.parseTime)(this.value, false); - default: - throw new error_1.ASN1TypeError('not a date'); - } - } - // Returns the ASN.1 object's value as a number[] where each number is the - // value of a bit in the bit string. Throws an error if the object is not a - // bit string. - toBitString() { - if (!this.tag.isBitString()) { - throw new error_1.ASN1TypeError('not a bit string'); - } - return (0, parse_1.parseBitString)(this.value); - } -} -exports.ASN1Obj = ASN1Obj; -///////////////////////////////////////////////////////////////////////////// -// Internal stream parsing functions -function parseStream(stream) { - // Parse tag, length, and value from stream - const tag = new tag_1.ASN1Tag(stream.getUint8()); - const len = (0, length_1.decodeLength)(stream); - const value = stream.slice(stream.position, len); - const start = stream.position; - let subs = []; - // If the object is constructed, parse its children. Sometimes, children - // are embedded in OCTESTRING objects, so we need to check those - // for children as well. - if (tag.constructed) { - subs = collectSubs(stream, len); - } - else if (tag.isOctetString()) { - // Attempt to parse children of OCTETSTRING objects. If anything fails, - // assume the object is not constructed and treat as primitive. - try { - subs = collectSubs(stream, len); - } - catch (e) { - // Fail silently and treat as primitive - } - } - // If there are no children, move stream cursor to the end of the object - if (subs.length === 0) { - stream.seek(start + len); - } - return new ASN1Obj(tag, value, subs); -} -function collectSubs(stream, len) { - // Calculate end of object content - const end = stream.position + len; - // Make sure there are enough bytes left in the stream. 
This should never - // happen, cause it'll get caught when the stream is sliced in parseStream. - // Leaving as an extra check just in case. - /* istanbul ignore if */ - if (end > stream.length) { - throw new error_1.ASN1ParseError('invalid length'); - } - // Parse all children - const subs = []; - while (stream.position < end) { - subs.push(parseStream(stream)); - } - // When we're done parsing children, we should be at the end of the object - if (stream.position !== end) { - throw new error_1.ASN1ParseError('invalid length'); - } - return subs; -} diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/parse.js b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/parse.js deleted file mode 100644 index 7fbb42632c60e..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/parse.js +++ /dev/null @@ -1,124 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.parseInteger = parseInteger; -exports.parseStringASCII = parseStringASCII; -exports.parseTime = parseTime; -exports.parseOID = parseOID; -exports.parseBoolean = parseBoolean; -exports.parseBitString = parseBitString; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const RE_TIME_SHORT_YEAR = /^(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\.\d{3})?Z$/; -const RE_TIME_LONG_YEAR = /^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\.\d{3})?Z$/; -// Parse a BigInt from the DER-encoded buffer -// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-integer -function parseInteger(buf) { - let pos = 0; - const end = buf.length; - let val = buf[pos]; - const neg = val > 0x7f; - // Consume any padding bytes - const pad = neg ? 0xff : 0x00; - while (val == pad && ++pos < end) { - val = buf[pos]; - } - // Calculate remaining bytes to read - const len = end - pos; - if (len === 0) - return BigInt(neg ? -1 : 0); - // Handle two's complement for negative numbers - val = neg ? val - 256 : val; - // Parse remaining bytes - let n = BigInt(val); - for (let i = pos + 1; i < end; ++i) { - n = n * BigInt(256) + BigInt(buf[i]); - } - return n; -} -// Parse an ASCII string from the DER-encoded buffer -// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-basic-types#boolean -function parseStringASCII(buf) { - return buf.toString('ascii'); -} -// Parse a Date from the DER-encoded buffer -// https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.5.1 -function parseTime(buf, shortYear) { - const timeStr = parseStringASCII(buf); - // Parse the time string into matches - captured groups start at index 1 - const m = shortYear - ? RE_TIME_SHORT_YEAR.exec(timeStr) - : RE_TIME_LONG_YEAR.exec(timeStr); - if (!m) { - throw new Error('invalid time'); - } - // Translate dates with a 2-digit year to 4 digits per the spec - if (shortYear) { - let year = Number(m[1]); - year += year >= 50 ? 
1900 : 2000; - m[1] = year.toString(); - } - // Translate to ISO8601 format and parse - return new Date(`${m[1]}-${m[2]}-${m[3]}T${m[4]}:${m[5]}:${m[6]}Z`); -} -// Parse an OID from the DER-encoded buffer -// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-object-identifier -function parseOID(buf) { - let pos = 0; - const end = buf.length; - // Consume first byte which encodes the first two OID components - let n = buf[pos++]; - const first = Math.floor(n / 40); - const second = n % 40; - let oid = `${first}.${second}`; - // Consume remaining bytes - let val = 0; - for (; pos < end; ++pos) { - n = buf[pos]; - val = (val << 7) + (n & 0x7f); - // If the left-most bit is NOT set, then this is the last byte in the - // sequence and we can add the value to the OID and reset the accumulator - if ((n & 0x80) === 0) { - oid += `.${val}`; - val = 0; - } - } - return oid; -} -// Parse a boolean from the DER-encoded buffer -// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-basic-types#boolean -function parseBoolean(buf) { - return buf[0] !== 0; -} -// Parse a bit string from the DER-encoded buffer -// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-bit-string -function parseBitString(buf) { - // First byte tell us how many unused bits are in the last byte - const unused = buf[0]; - const start = 1; - const end = buf.length; - const bits = []; - for (let i = start; i < end; ++i) { - const byte = buf[i]; - // The skip value is only used for the last byte - const skip = i === end - 1 ? unused : 0; - // Iterate over each bit in the byte (most significant first) - for (let j = 7; j >= skip; --j) { - // Read the bit and add it to the bit string - bits.push((byte >> j) & 0x01); - } - } - return bits; -} diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/tag.js b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/tag.js deleted file mode 100644 index 84dd938d049aa..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/tag.js +++ /dev/null @@ -1,86 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.ASN1Tag = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -const error_1 = require("./error"); -const UNIVERSAL_TAG = { - BOOLEAN: 0x01, - INTEGER: 0x02, - BIT_STRING: 0x03, - OCTET_STRING: 0x04, - OBJECT_IDENTIFIER: 0x06, - SEQUENCE: 0x10, - SET: 0x11, - PRINTABLE_STRING: 0x13, - UTC_TIME: 0x17, - GENERALIZED_TIME: 0x18, -}; -const TAG_CLASS = { - UNIVERSAL: 0x00, - APPLICATION: 0x01, - CONTEXT_SPECIFIC: 0x02, - PRIVATE: 0x03, -}; -// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-encoded-tag-bytes -class ASN1Tag { - constructor(enc) { - // Bits 0 through 4 are the tag number - this.number = enc & 0x1f; - // Bit 5 is the constructed bit - this.constructed = (enc & 0x20) === 0x20; - // Bit 6 & 7 are the class - this.class = enc >> 6; - if (this.number === 0x1f) { - throw new error_1.ASN1ParseError('long form tags not supported'); - } - if (this.class === TAG_CLASS.UNIVERSAL && this.number === 0x00) { - throw new error_1.ASN1ParseError('unsupported tag 0x00'); - } - } - isUniversal() { - return this.class === TAG_CLASS.UNIVERSAL; - } - isContextSpecific(num) { - const res = this.class === TAG_CLASS.CONTEXT_SPECIFIC; - return num !== undefined ? res && this.number === num : res; - } - isBoolean() { - return this.isUniversal() && this.number === UNIVERSAL_TAG.BOOLEAN; - } - isInteger() { - return this.isUniversal() && this.number === UNIVERSAL_TAG.INTEGER; - } - isBitString() { - return this.isUniversal() && this.number === UNIVERSAL_TAG.BIT_STRING; - } - isOctetString() { - return this.isUniversal() && this.number === UNIVERSAL_TAG.OCTET_STRING; - } - isOID() { - return (this.isUniversal() && this.number === UNIVERSAL_TAG.OBJECT_IDENTIFIER); - } - isUTCTime() { - return this.isUniversal() && this.number === UNIVERSAL_TAG.UTC_TIME; - } - isGeneralizedTime() { - return this.isUniversal() && this.number === UNIVERSAL_TAG.GENERALIZED_TIME; - } - toDER() { - return this.number | (this.constructed ? 0x20 : 0x00) | (this.class << 6); - } -} -exports.ASN1Tag = ASN1Tag; diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/crypto.js b/node_modules/pacote/node_modules/@sigstore/core/dist/crypto.js deleted file mode 100644 index 296b5ba43e86a..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/core/dist/crypto.js +++ /dev/null @@ -1,60 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.createPublicKey = createPublicKey; -exports.digest = digest; -exports.verify = verify; -exports.bufferEqual = bufferEqual; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -const crypto_1 = __importDefault(require("crypto")); -function createPublicKey(key, type = 'spki') { - if (typeof key === 'string') { - return crypto_1.default.createPublicKey(key); - } - else { - return crypto_1.default.createPublicKey({ key, format: 'der', type: type }); - } -} -function digest(algorithm, ...data) { - const hash = crypto_1.default.createHash(algorithm); - for (const d of data) { - hash.update(d); - } - return hash.digest(); -} -function verify(data, key, signature, algorithm) { - // The try/catch is to work around an issue in Node 14.x where verify throws - // an error in some scenarios if the signature is invalid. - try { - return crypto_1.default.verify(algorithm, data, key, signature); - } - catch (e) { - /* istanbul ignore next */ - return false; - } -} -function bufferEqual(a, b) { - try { - return crypto_1.default.timingSafeEqual(a, b); - } - catch { - /* istanbul ignore next */ - return false; - } -} diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/dsse.js b/node_modules/pacote/node_modules/@sigstore/core/dist/dsse.js deleted file mode 100644 index ca7b63630e2ba..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/core/dist/dsse.js +++ /dev/null @@ -1,30 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.preAuthEncoding = preAuthEncoding; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const PAE_PREFIX = 'DSSEv1'; -// DSSE Pre-Authentication Encoding -function preAuthEncoding(payloadType, payload) { - const prefix = [ - PAE_PREFIX, - payloadType.length, - payloadType, - payload.length, - '', - ].join(' '); - return Buffer.concat([Buffer.from(prefix, 'ascii'), payload]); -} diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/encoding.js b/node_modules/pacote/node_modules/@sigstore/core/dist/encoding.js deleted file mode 100644 index 7113af66db4c2..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/core/dist/encoding.js +++ /dev/null @@ -1,27 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.base64Encode = base64Encode; -exports.base64Decode = base64Decode; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -const BASE64_ENCODING = 'base64'; -const UTF8_ENCODING = 'utf-8'; -function base64Encode(str) { - return Buffer.from(str, UTF8_ENCODING).toString(BASE64_ENCODING); -} -function base64Decode(str) { - return Buffer.from(str, BASE64_ENCODING).toString(UTF8_ENCODING); -} diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/index.js b/node_modules/pacote/node_modules/@sigstore/core/dist/index.js deleted file mode 100644 index ac35e86a8df7d..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/core/dist/index.js +++ /dev/null @@ -1,56 +0,0 @@ -"use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.X509SCTExtension = exports.X509Certificate = exports.EXTENSION_OID_SCT = exports.ByteStream = exports.RFC3161Timestamp = exports.pem = exports.json = exports.encoding = exports.dsse = exports.crypto = exports.ASN1Obj = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -var asn1_1 = require("./asn1"); -Object.defineProperty(exports, "ASN1Obj", { enumerable: true, get: function () { return asn1_1.ASN1Obj; } }); -exports.crypto = __importStar(require("./crypto")); -exports.dsse = __importStar(require("./dsse")); -exports.encoding = __importStar(require("./encoding")); -exports.json = __importStar(require("./json")); -exports.pem = __importStar(require("./pem")); -var rfc3161_1 = require("./rfc3161"); -Object.defineProperty(exports, "RFC3161Timestamp", { enumerable: true, get: function () { return rfc3161_1.RFC3161Timestamp; } }); -var stream_1 = require("./stream"); -Object.defineProperty(exports, "ByteStream", { enumerable: true, get: function () { return stream_1.ByteStream; } }); -var x509_1 = require("./x509"); -Object.defineProperty(exports, "EXTENSION_OID_SCT", { enumerable: true, get: function () { return x509_1.EXTENSION_OID_SCT; } }); -Object.defineProperty(exports, "X509Certificate", { enumerable: true, get: function () { return x509_1.X509Certificate; } }); -Object.defineProperty(exports, "X509SCTExtension", { enumerable: true, get: function () { return x509_1.X509SCTExtension; } }); diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/json.js b/node_modules/pacote/node_modules/@sigstore/core/dist/json.js deleted file mode 100644 index 7808d033b98cc..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/core/dist/json.js +++ /dev/null @@ -1,60 +0,0 @@ -"use strict"; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.canonicalize = canonicalize; -// JSON canonicalization per https://github.com/cyberphone/json-canonicalization -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function canonicalize(object) { - let buffer = ''; - if (object === null || typeof object !== 'object' || object.toJSON != null) { - // Primitives or toJSONable objects - buffer += JSON.stringify(object); - } - else if (Array.isArray(object)) { - // Array - maintain element order - buffer += '['; - let first = true; - object.forEach((element) => { - if (!first) { - buffer += ','; - } - first = false; - // recursive call - buffer += canonicalize(element); - }); - buffer += ']'; - } - else { - // Object - Sort properties before serializing - buffer += '{'; - let first = true; - Object.keys(object) - .sort() - .forEach((property) => { - if (!first) { - buffer += ','; - } - first = false; - buffer += JSON.stringify(property); - buffer += ':'; - // recursive call - buffer += canonicalize(object[property]); - }); - buffer += '}'; - } - return buffer; -} diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/oid.js b/node_modules/pacote/node_modules/@sigstore/core/dist/oid.js deleted file mode 100644 index ac7a643067ad0..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/core/dist/oid.js +++ /dev/null @@ -1,14 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.SHA2_HASH_ALGOS = exports.ECDSA_SIGNATURE_ALGOS = void 0; -exports.ECDSA_SIGNATURE_ALGOS = { - '1.2.840.10045.4.3.1': 'sha224', - '1.2.840.10045.4.3.2': 'sha256', - '1.2.840.10045.4.3.3': 'sha384', - '1.2.840.10045.4.3.4': 'sha512', -}; -exports.SHA2_HASH_ALGOS = { - '2.16.840.1.101.3.4.2.1': 'sha256', - '2.16.840.1.101.3.4.2.2': 'sha384', - '2.16.840.1.101.3.4.2.3': 'sha512', -}; diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/pem.js b/node_modules/pacote/node_modules/@sigstore/core/dist/pem.js deleted file mode 100644 index f1241d28d586e..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/core/dist/pem.js +++ /dev/null @@ -1,43 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.toDER = toDER; -exports.fromDER = fromDER; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const PEM_HEADER = /-----BEGIN (.*)-----/; -const PEM_FOOTER = /-----END (.*)-----/; -function toDER(certificate) { - let der = ''; - certificate.split('\n').forEach((line) => { - if (line.match(PEM_HEADER) || line.match(PEM_FOOTER)) { - return; - } - der += line; - }); - return Buffer.from(der, 'base64'); -} -// Translates a DER-encoded buffer into a PEM-encoded string. Standard PEM -// encoding dictates that each certificate should have a trailing newline after -// the footer. -function fromDER(certificate, type = 'CERTIFICATE') { - // Base64-encode the certificate. 
- const der = certificate.toString('base64'); - // Split the certificate into lines of 64 characters. - const lines = der.match(/.{1,64}/g) || ''; - return [`-----BEGIN ${type}-----`, ...lines, `-----END ${type}-----`] - .join('\n') - .concat('\n'); -} diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/error.js b/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/error.js deleted file mode 100644 index b9b549b0bb323..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/error.js +++ /dev/null @@ -1,21 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.RFC3161TimestampVerificationError = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -class RFC3161TimestampVerificationError extends Error { -} -exports.RFC3161TimestampVerificationError = RFC3161TimestampVerificationError; diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/index.js b/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/index.js deleted file mode 100644 index b77ecf1c7d50c..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/index.js +++ /dev/null @@ -1,20 +0,0 @@ -"use strict"; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.RFC3161Timestamp = void 0; -var timestamp_1 = require("./timestamp"); -Object.defineProperty(exports, "RFC3161Timestamp", { enumerable: true, get: function () { return timestamp_1.RFC3161Timestamp; } }); diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/timestamp.js b/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/timestamp.js deleted file mode 100644 index 3e61fc1a4e169..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/timestamp.js +++ /dev/null @@ -1,201 +0,0 @@ -"use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.RFC3161Timestamp = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const asn1_1 = require("../asn1"); -const crypto = __importStar(require("../crypto")); -const oid_1 = require("../oid"); -const error_1 = require("./error"); -const tstinfo_1 = require("./tstinfo"); -const OID_PKCS9_CONTENT_TYPE_SIGNED_DATA = '1.2.840.113549.1.7.2'; -const OID_PKCS9_CONTENT_TYPE_TSTINFO = '1.2.840.113549.1.9.16.1.4'; -const OID_PKCS9_MESSAGE_DIGEST_KEY = '1.2.840.113549.1.9.4'; -class RFC3161Timestamp { - constructor(asn1) { - this.root = asn1; - } - static parse(der) { - const asn1 = asn1_1.ASN1Obj.parseBuffer(der); - return new RFC3161Timestamp(asn1); - } - get status() { - return this.pkiStatusInfoObj.subs[0].toInteger(); - } - get contentType() { - return this.contentTypeObj.toOID(); - } - get eContentType() { - return this.eContentTypeObj.toOID(); - } - get signingTime() { - return this.tstInfo.genTime; - } - get signerIssuer() { - return this.signerSidObj.subs[0].value; - } - get signerSerialNumber() { - return this.signerSidObj.subs[1].value; - } - get signerDigestAlgorithm() { - const oid = this.signerDigestAlgorithmObj.subs[0].toOID(); - return oid_1.SHA2_HASH_ALGOS[oid]; - } - get signatureAlgorithm() { - const oid = this.signatureAlgorithmObj.subs[0].toOID(); - return oid_1.ECDSA_SIGNATURE_ALGOS[oid]; - } - get signatureValue() { - return this.signatureValueObj.value; - } - get tstInfo() { - // Need to unpack tstInfo from an OCTET STRING - return new tstinfo_1.TSTInfo(this.eContentObj.subs[0].subs[0]); - } - verify(data, publicKey) { - if (!this.timeStampTokenObj) { - throw new error_1.RFC3161TimestampVerificationError('timeStampToken is missing'); - } - // Check for expected ContentInfo content type - if (this.contentType !== OID_PKCS9_CONTENT_TYPE_SIGNED_DATA) { - throw new error_1.RFC3161TimestampVerificationError(`incorrect content type: ${this.contentType}`); - } - // Check for expected encapsulated content type - if (this.eContentType !== OID_PKCS9_CONTENT_TYPE_TSTINFO) { - throw new error_1.RFC3161TimestampVerificationError(`incorrect encapsulated content type: ${this.eContentType}`); - } - // Check that the tstInfo references the correct artifact - this.tstInfo.verify(data); - // Check that the signed message digest matches the tstInfo - this.verifyMessageDigest(); - // Check that the signature is valid for the signed attributes - this.verifySignature(publicKey); - } - verifyMessageDigest() { - // Check that the tstInfo matches the 
signed data - const tstInfoDigest = crypto.digest(this.signerDigestAlgorithm, this.tstInfo.raw); - const expectedDigest = this.messageDigestAttributeObj.subs[1].subs[0].value; - if (!crypto.bufferEqual(tstInfoDigest, expectedDigest)) { - throw new error_1.RFC3161TimestampVerificationError('signed data does not match tstInfo'); - } - } - verifySignature(key) { - // Encode the signed attributes for verification - const signedAttrs = this.signedAttrsObj.toDER(); - signedAttrs[0] = 0x31; // Change context-specific tag to SET - // Check that the signature is valid for the signed attributes - const verified = crypto.verify(signedAttrs, key, this.signatureValue, this.signatureAlgorithm); - if (!verified) { - throw new error_1.RFC3161TimestampVerificationError('signature verification failed'); - } - } - // https://www.rfc-editor.org/rfc/rfc3161#section-2.4.2 - get pkiStatusInfoObj() { - // pkiStatusInfo is the first element of the timestamp response sequence - return this.root.subs[0]; - } - // https://www.rfc-editor.org/rfc/rfc3161#section-2.4.2 - get timeStampTokenObj() { - // timeStampToken is the first element of the timestamp response sequence - return this.root.subs[1]; - } - // https://datatracker.ietf.org/doc/html/rfc5652#section-3 - get contentTypeObj() { - return this.timeStampTokenObj.subs[0]; - } - // https://www.rfc-editor.org/rfc/rfc5652#section-3 - get signedDataObj() { - const obj = this.timeStampTokenObj.subs.find((sub) => sub.tag.isContextSpecific(0x00)); - return obj.subs[0]; - } - // https://datatracker.ietf.org/doc/html/rfc5652#section-5.1 - get encapContentInfoObj() { - return this.signedDataObj.subs[2]; - } - // https://datatracker.ietf.org/doc/html/rfc5652#section-5.1 - get signerInfosObj() { - // SignerInfos is the last element of the signed data sequence - const sd = this.signedDataObj; - return sd.subs[sd.subs.length - 1]; - } - // https://www.rfc-editor.org/rfc/rfc5652#section-5.1 - get signerInfoObj() { - // Only supporting one signer - return this.signerInfosObj.subs[0]; - } - // https://datatracker.ietf.org/doc/html/rfc5652#section-5.2 - get eContentTypeObj() { - return this.encapContentInfoObj.subs[0]; - } - // https://datatracker.ietf.org/doc/html/rfc5652#section-5.2 - get eContentObj() { - return this.encapContentInfoObj.subs[1]; - } - // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3 - get signedAttrsObj() { - const signedAttrs = this.signerInfoObj.subs.find((sub) => sub.tag.isContextSpecific(0x00)); - return signedAttrs; - } - // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3 - get messageDigestAttributeObj() { - const messageDigest = this.signedAttrsObj.subs.find((sub) => sub.subs[0].tag.isOID() && - sub.subs[0].toOID() === OID_PKCS9_MESSAGE_DIGEST_KEY); - return messageDigest; - } - // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3 - get signerSidObj() { - return this.signerInfoObj.subs[1]; - } - // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3 - get signerDigestAlgorithmObj() { - // Signature is the 2nd element of the signerInfoObj object - return this.signerInfoObj.subs[2]; - } - // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3 - get signatureAlgorithmObj() { - // Signature is the 4th element of the signerInfoObj object - return this.signerInfoObj.subs[4]; - } - // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3 - get signatureValueObj() { - // Signature is the 6th element of the signerInfoObj object - return this.signerInfoObj.subs[5]; - } -} -exports.RFC3161Timestamp = RFC3161Timestamp; 
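The RFC3161Timestamp class removed above is driven through a small surface: a static parse() over the DER bytes, a verify() that checks the tstInfo digest and the signature over the signed attributes, and accessors such as signingTime. A minimal usage sketch, assuming the caller supplies the DER-encoded timestamp response, the artifact bytes, and the timestamp authority's public key in PEM form (checkTimestamp and the argument names are placeholders, not part of the library):

    // Sketch only: parse and verify an RFC 3161 timestamp for an artifact.
    const crypto = require('crypto')
    const { RFC3161Timestamp } = require('@sigstore/core')

    function checkTimestamp (timestampDER, artifact, tsaPublicKeyPEM) {
      // Parse the DER-encoded timestamp response
      const ts = RFC3161Timestamp.parse(timestampDER)
      // Throws RFC3161TimestampVerificationError if the tstInfo digest or the
      // signature over the signed attributes does not verify
      ts.verify(artifact, crypto.createPublicKey(tsaPublicKeyPEM))
      // genTime extracted from the embedded TSTInfo
      return ts.signingTime
    }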
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js b/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js deleted file mode 100644 index dc8e4fb339383..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js +++ /dev/null @@ -1,61 +0,0 @@ -"use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.TSTInfo = void 0; -const crypto = __importStar(require("../crypto")); -const oid_1 = require("../oid"); -const error_1 = require("./error"); -class TSTInfo { - constructor(asn1) { - this.root = asn1; - } - get version() { - return this.root.subs[0].toInteger(); - } - get genTime() { - return this.root.subs[4].toDate(); - } - get messageImprintHashAlgorithm() { - const oid = this.messageImprintObj.subs[0].subs[0].toOID(); - return oid_1.SHA2_HASH_ALGOS[oid]; - } - get messageImprintHashedMessage() { - return this.messageImprintObj.subs[1].value; - } - get raw() { - return this.root.toDER(); - } - verify(data) { - const digest = crypto.digest(this.messageImprintHashAlgorithm, data); - if (!crypto.bufferEqual(digest, this.messageImprintHashedMessage)) { - throw new error_1.RFC3161TimestampVerificationError('message imprint does not match artifact'); - } - } - // https://www.rfc-editor.org/rfc/rfc3161#section-2.4.2 - get messageImprintObj() { - return this.root.subs[2]; - } -} -exports.TSTInfo = TSTInfo; diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/stream.js b/node_modules/pacote/node_modules/@sigstore/core/dist/stream.js deleted file mode 100644 index 0a24f8582eb23..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/core/dist/stream.js +++ /dev/null @@ -1,115 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.ByteStream = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -class StreamError extends Error { -} -class ByteStream { - constructor(buffer) { - this.start = 0; - if (buffer) { - this.buf = buffer; - this.view = Buffer.from(buffer); - } - else { - this.buf = new ArrayBuffer(0); - this.view = Buffer.from(this.buf); - } - } - get buffer() { - return this.view.subarray(0, this.start); - } - get length() { - return this.view.byteLength; - } - get position() { - return this.start; - } - seek(position) { - this.start = position; - } - // Returns a Buffer containing the specified number of bytes starting at the - // given start position. - slice(start, len) { - const end = start + len; - if (end > this.length) { - throw new StreamError('request past end of buffer'); - } - return this.view.subarray(start, end); - } - appendChar(char) { - this.ensureCapacity(1); - this.view[this.start] = char; - this.start += 1; - } - appendUint16(num) { - this.ensureCapacity(2); - const value = new Uint16Array([num]); - const view = new Uint8Array(value.buffer); - this.view[this.start] = view[1]; - this.view[this.start + 1] = view[0]; - this.start += 2; - } - appendUint24(num) { - this.ensureCapacity(3); - const value = new Uint32Array([num]); - const view = new Uint8Array(value.buffer); - this.view[this.start] = view[2]; - this.view[this.start + 1] = view[1]; - this.view[this.start + 2] = view[0]; - this.start += 3; - } - appendView(view) { - this.ensureCapacity(view.length); - this.view.set(view, this.start); - this.start += view.length; - } - getBlock(size) { - if (size <= 0) { - return Buffer.alloc(0); - } - if (this.start + size > this.view.length) { - throw new Error('request past end of buffer'); - } - const result = this.view.subarray(this.start, this.start + size); - this.start += size; - return result; - } - getUint8() { - return this.getBlock(1)[0]; - } - getUint16() { - const block = this.getBlock(2); - return (block[0] << 8) | block[1]; - } - ensureCapacity(size) { - if (this.start + size > this.view.byteLength) { - const blockSize = ByteStream.BLOCK_SIZE + (size > ByteStream.BLOCK_SIZE ? size : 0); - this.realloc(this.view.byteLength + blockSize); - } - } - realloc(size) { - const newArray = new ArrayBuffer(size); - const newView = Buffer.from(newArray); - // Copy the old buffer into the new one - newView.set(this.view); - this.buf = newArray; - this.view = newView; - } -} -exports.ByteStream = ByteStream; -ByteStream.BLOCK_SIZE = 1024; diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/x509/cert.js b/node_modules/pacote/node_modules/@sigstore/core/dist/x509/cert.js deleted file mode 100644 index 72ea8e0738bc8..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/core/dist/x509/cert.js +++ /dev/null @@ -1,230 +0,0 @@ -"use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.X509Certificate = exports.EXTENSION_OID_SCT = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const asn1_1 = require("../asn1"); -const crypto = __importStar(require("../crypto")); -const oid_1 = require("../oid"); -const pem = __importStar(require("../pem")); -const ext_1 = require("./ext"); -const EXTENSION_OID_SUBJECT_KEY_ID = '2.5.29.14'; -const EXTENSION_OID_KEY_USAGE = '2.5.29.15'; -const EXTENSION_OID_SUBJECT_ALT_NAME = '2.5.29.17'; -const EXTENSION_OID_BASIC_CONSTRAINTS = '2.5.29.19'; -const EXTENSION_OID_AUTHORITY_KEY_ID = '2.5.29.35'; -exports.EXTENSION_OID_SCT = '1.3.6.1.4.1.11129.2.4.2'; -class X509Certificate { - constructor(asn1) { - this.root = asn1; - } - static parse(cert) { - const der = typeof cert === 'string' ? pem.toDER(cert) : cert; - const asn1 = asn1_1.ASN1Obj.parseBuffer(der); - return new X509Certificate(asn1); - } - get tbsCertificate() { - return this.tbsCertificateObj; - } - get version() { - // version number is the first element of the version context specific tag - const ver = this.versionObj.subs[0].toInteger(); - return `v${(ver + BigInt(1)).toString()}`; - } - get serialNumber() { - return this.serialNumberObj.value; - } - get notBefore() { - // notBefore is the first element of the validity sequence - return this.validityObj.subs[0].toDate(); - } - get notAfter() { - // notAfter is the second element of the validity sequence - return this.validityObj.subs[1].toDate(); - } - get issuer() { - return this.issuerObj.value; - } - get subject() { - return this.subjectObj.value; - } - get publicKey() { - return this.subjectPublicKeyInfoObj.toDER(); - } - get signatureAlgorithm() { - const oid = this.signatureAlgorithmObj.subs[0].toOID(); - return oid_1.ECDSA_SIGNATURE_ALGOS[oid]; - } - get signatureValue() { - // Signature value is a bit string, so we need to skip the first byte - return this.signatureValueObj.value.subarray(1); - } - get subjectAltName() { - const ext = this.extSubjectAltName; - return ext?.uri || /* istanbul ignore next */ ext?.rfc822Name; - } - get extensions() { - // The extension list is the first (and only) element of the extensions - // context specific tag - /* istanbul ignore next */ - const extSeq = this.extensionsObj?.subs[0]; - /* istanbul ignore next */ - return extSeq?.subs || []; - } - get extKeyUsage() { - const ext = this.findExtension(EXTENSION_OID_KEY_USAGE); - return ext ? 
new ext_1.X509KeyUsageExtension(ext) : undefined; - } - get extBasicConstraints() { - const ext = this.findExtension(EXTENSION_OID_BASIC_CONSTRAINTS); - return ext ? new ext_1.X509BasicConstraintsExtension(ext) : undefined; - } - get extSubjectAltName() { - const ext = this.findExtension(EXTENSION_OID_SUBJECT_ALT_NAME); - return ext ? new ext_1.X509SubjectAlternativeNameExtension(ext) : undefined; - } - get extAuthorityKeyID() { - const ext = this.findExtension(EXTENSION_OID_AUTHORITY_KEY_ID); - return ext ? new ext_1.X509AuthorityKeyIDExtension(ext) : undefined; - } - get extSubjectKeyID() { - const ext = this.findExtension(EXTENSION_OID_SUBJECT_KEY_ID); - return ext - ? new ext_1.X509SubjectKeyIDExtension(ext) - : /* istanbul ignore next */ undefined; - } - get extSCT() { - const ext = this.findExtension(exports.EXTENSION_OID_SCT); - return ext ? new ext_1.X509SCTExtension(ext) : undefined; - } - get isCA() { - const ca = this.extBasicConstraints?.isCA || false; - // If the KeyUsage extension is present, keyCertSign must be set - if (this.extKeyUsage) { - return ca && this.extKeyUsage.keyCertSign; - } - // TODO: test coverage for this case - /* istanbul ignore next */ - return ca; - } - extension(oid) { - const ext = this.findExtension(oid); - return ext ? new ext_1.X509Extension(ext) : undefined; - } - verify(issuerCertificate) { - // Use the issuer's public key if provided, otherwise use the subject's - const publicKey = issuerCertificate?.publicKey || this.publicKey; - const key = crypto.createPublicKey(publicKey); - return crypto.verify(this.tbsCertificate.toDER(), key, this.signatureValue, this.signatureAlgorithm); - } - validForDate(date) { - return this.notBefore <= date && date <= this.notAfter; - } - equals(other) { - return this.root.toDER().equals(other.root.toDER()); - } - // Creates a copy of the certificate with a new buffer - clone() { - const der = this.root.toDER(); - const clone = Buffer.alloc(der.length); - der.copy(clone); - return X509Certificate.parse(clone); - } - findExtension(oid) { - // Find the extension with the given OID. 
The OID will always be the first - // element of the extension sequence - return this.extensions.find((ext) => ext.subs[0].toOID() === oid); - } - ///////////////////////////////////////////////////////////////////////////// - // The following properties use the documented x509 structure to locate the - // desired ASN.1 object - // https://www.rfc-editor.org/rfc/rfc5280#section-4.1 - // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.1.1 - get tbsCertificateObj() { - // tbsCertificate is the first element of the certificate sequence - return this.root.subs[0]; - } - // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.1.2 - get signatureAlgorithmObj() { - // signatureAlgorithm is the second element of the certificate sequence - return this.root.subs[1]; - } - // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.1.3 - get signatureValueObj() { - // signatureValue is the third element of the certificate sequence - return this.root.subs[2]; - } - // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.1 - get versionObj() { - // version is the first element of the tbsCertificate sequence - return this.tbsCertificateObj.subs[0]; - } - // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.2 - get serialNumberObj() { - // serialNumber is the second element of the tbsCertificate sequence - return this.tbsCertificateObj.subs[1]; - } - // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.4 - get issuerObj() { - // issuer is the fourth element of the tbsCertificate sequence - return this.tbsCertificateObj.subs[3]; - } - // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.5 - get validityObj() { - // version is the fifth element of the tbsCertificate sequence - return this.tbsCertificateObj.subs[4]; - } - // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.6 - get subjectObj() { - // subject is the sixth element of the tbsCertificate sequence - return this.tbsCertificateObj.subs[5]; - } - // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.7 - get subjectPublicKeyInfoObj() { - // subjectPublicKeyInfo is the seventh element of the tbsCertificate sequence - return this.tbsCertificateObj.subs[6]; - } - // Extensions can't be located by index because their position varies. Instead, - // we need to find the extensions context specific tag - // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.9 - get extensionsObj() { - return this.tbsCertificateObj.subs.find((sub) => sub.tag.isContextSpecific(0x03)); - } -} -exports.X509Certificate = X509Certificate; diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/x509/ext.js b/node_modules/pacote/node_modules/@sigstore/core/dist/x509/ext.js deleted file mode 100644 index 1d481261b0aa6..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/core/dist/x509/ext.js +++ /dev/null @@ -1,145 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.X509SCTExtension = exports.X509SubjectKeyIDExtension = exports.X509AuthorityKeyIDExtension = exports.X509SubjectAlternativeNameExtension = exports.X509KeyUsageExtension = exports.X509BasicConstraintsExtension = exports.X509Extension = void 0; -const stream_1 = require("../stream"); -const sct_1 = require("./sct"); -// https://www.rfc-editor.org/rfc/rfc5280#section-4.1 -class X509Extension { - constructor(asn1) { - this.root = asn1; - } - get oid() { - return this.root.subs[0].toOID(); - } - get critical() { - // The critical field is optional and will be the second element of the - // extension sequence if present. 
Default to false if not present. - return this.root.subs.length === 3 ? this.root.subs[1].toBoolean() : false; - } - get value() { - return this.extnValueObj.value; - } - get valueObj() { - return this.extnValueObj; - } - get extnValueObj() { - // The extnValue field will be the last element of the extension sequence - return this.root.subs[this.root.subs.length - 1]; - } -} -exports.X509Extension = X509Extension; -// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.9 -class X509BasicConstraintsExtension extends X509Extension { - get isCA() { - return this.sequence.subs[0]?.toBoolean() ?? false; - } - get pathLenConstraint() { - return this.sequence.subs.length > 1 - ? this.sequence.subs[1].toInteger() - : undefined; - } - // The extnValue field contains a single sequence wrapping the isCA and - // pathLenConstraint. - get sequence() { - return this.extnValueObj.subs[0]; - } -} -exports.X509BasicConstraintsExtension = X509BasicConstraintsExtension; -// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.3 -class X509KeyUsageExtension extends X509Extension { - get digitalSignature() { - return this.bitString[0] === 1; - } - get keyCertSign() { - return this.bitString[5] === 1; - } - get crlSign() { - return this.bitString[6] === 1; - } - // The extnValue field contains a single bit string which is a bit mask - // indicating which key usages are enabled. - get bitString() { - return this.extnValueObj.subs[0].toBitString(); - } -} -exports.X509KeyUsageExtension = X509KeyUsageExtension; -// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.6 -class X509SubjectAlternativeNameExtension extends X509Extension { - get rfc822Name() { - return this.findGeneralName(0x01)?.value.toString('ascii'); - } - get uri() { - return this.findGeneralName(0x06)?.value.toString('ascii'); - } - // Retrieve the value of an otherName with the given OID. - otherName(oid) { - const otherName = this.findGeneralName(0x00); - if (otherName === undefined) { - return undefined; - } - // The otherName is a sequence containing an OID and a value. - // Need to check that the OID matches the one we're looking for. - const otherNameOID = otherName.subs[0].toOID(); - if (otherNameOID !== oid) { - return undefined; - } - // The otherNameValue is a sequence containing the actual value. - const otherNameValue = otherName.subs[1]; - return otherNameValue.subs[0].value.toString('ascii'); - } - findGeneralName(tag) { - return this.generalNames.find((gn) => gn.tag.isContextSpecific(tag)); - } - // The extnValue field contains a sequence of GeneralNames. 
- get generalNames() { - return this.extnValueObj.subs[0].subs; - } -} -exports.X509SubjectAlternativeNameExtension = X509SubjectAlternativeNameExtension; -// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.1 -class X509AuthorityKeyIDExtension extends X509Extension { - get keyIdentifier() { - return this.findSequenceMember(0x00)?.value; - } - findSequenceMember(tag) { - return this.sequence.subs.find((el) => el.tag.isContextSpecific(tag)); - } - // The extnValue field contains a single sequence wrapping the keyIdentifier - get sequence() { - return this.extnValueObj.subs[0]; - } -} -exports.X509AuthorityKeyIDExtension = X509AuthorityKeyIDExtension; -// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.2 -class X509SubjectKeyIDExtension extends X509Extension { - get keyIdentifier() { - return this.extnValueObj.subs[0].value; - } -} -exports.X509SubjectKeyIDExtension = X509SubjectKeyIDExtension; -// https://www.rfc-editor.org/rfc/rfc6962#section-3.3 -class X509SCTExtension extends X509Extension { - constructor(asn1) { - super(asn1); - } - get signedCertificateTimestamps() { - const buf = this.extnValueObj.subs[0].value; - const stream = new stream_1.ByteStream(buf); - // The overall list length is encoded in the first two bytes -- note this - // is the length of the list in bytes, NOT the number of SCTs in the list - const end = stream.getUint16() + 2; - const sctList = []; - while (stream.position < end) { - // Read the length of the next SCT - const sctLength = stream.getUint16(); - // Slice out the bytes for the next SCT and parse it - const sct = stream.getBlock(sctLength); - sctList.push(sct_1.SignedCertificateTimestamp.parse(sct)); - } - if (stream.position !== end) { - throw new Error('SCT list length does not match actual length'); - } - return sctList; - } -} -exports.X509SCTExtension = X509SCTExtension; diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/x509/index.js b/node_modules/pacote/node_modules/@sigstore/core/dist/x509/index.js deleted file mode 100644 index cdd77e58f37d5..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/core/dist/x509/index.js +++ /dev/null @@ -1,23 +0,0 @@ -"use strict"; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.X509SCTExtension = exports.X509Certificate = exports.EXTENSION_OID_SCT = void 0; -var cert_1 = require("./cert"); -Object.defineProperty(exports, "EXTENSION_OID_SCT", { enumerable: true, get: function () { return cert_1.EXTENSION_OID_SCT; } }); -Object.defineProperty(exports, "X509Certificate", { enumerable: true, get: function () { return cert_1.X509Certificate; } }); -var ext_1 = require("./ext"); -Object.defineProperty(exports, "X509SCTExtension", { enumerable: true, get: function () { return ext_1.X509SCTExtension; } }); diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/x509/sct.js b/node_modules/pacote/node_modules/@sigstore/core/dist/x509/sct.js deleted file mode 100644 index 1603059c0d1ac..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/core/dist/x509/sct.js +++ /dev/null @@ -1,141 +0,0 @@ -"use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.SignedCertificateTimestamp = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const crypto = __importStar(require("../crypto")); -const stream_1 = require("../stream"); -class SignedCertificateTimestamp { - constructor(options) { - this.version = options.version; - this.logID = options.logID; - this.timestamp = options.timestamp; - this.extensions = options.extensions; - this.hashAlgorithm = options.hashAlgorithm; - this.signatureAlgorithm = options.signatureAlgorithm; - this.signature = options.signature; - } - get datetime() { - return new Date(Number(this.timestamp.readBigInt64BE())); - } - // Returns the hash algorithm used to generate the SCT's signature. 
- // https://www.rfc-editor.org/rfc/rfc5246#section-7.4.1.4.1 - get algorithm() { - switch (this.hashAlgorithm) { - /* istanbul ignore next */ - case 0: - return 'none'; - /* istanbul ignore next */ - case 1: - return 'md5'; - /* istanbul ignore next */ - case 2: - return 'sha1'; - /* istanbul ignore next */ - case 3: - return 'sha224'; - case 4: - return 'sha256'; - /* istanbul ignore next */ - case 5: - return 'sha384'; - /* istanbul ignore next */ - case 6: - return 'sha512'; - /* istanbul ignore next */ - default: - return 'unknown'; - } - } - verify(preCert, key) { - // Assemble the digitally-signed struct (the data over which the signature - // was generated). - // https://www.rfc-editor.org/rfc/rfc6962#section-3.2 - const stream = new stream_1.ByteStream(); - stream.appendChar(this.version); - stream.appendChar(0x00); // SignatureType = certificate_timestamp(0) - stream.appendView(this.timestamp); - stream.appendUint16(0x01); // LogEntryType = precert_entry(1) - stream.appendView(preCert); - stream.appendUint16(this.extensions.byteLength); - /* istanbul ignore next - extensions are very uncommon */ - if (this.extensions.byteLength > 0) { - stream.appendView(this.extensions); - } - return crypto.verify(stream.buffer, key, this.signature, this.algorithm); - } - // Parses a SignedCertificateTimestamp from a buffer. SCTs are encoded using - // TLS encoding which means the fields and lengths of most fields are - // specified as part of the SCT and TLS specs. - // https://www.rfc-editor.org/rfc/rfc6962#section-3.2 - // https://www.rfc-editor.org/rfc/rfc5246#section-7.4.1.4.1 - static parse(buf) { - const stream = new stream_1.ByteStream(buf); - // Version - enum { v1(0), (255) } - const version = stream.getUint8(); - // Log ID - struct { opaque key_id[32]; } - const logID = stream.getBlock(32); - // Timestamp - uint64 - const timestamp = stream.getBlock(8); - // Extensions - opaque extensions<0..2^16-1>; - const extenstionLength = stream.getUint16(); - const extensions = stream.getBlock(extenstionLength); - // Hash algo - enum { sha256(4), . . . 
(255) } - const hashAlgorithm = stream.getUint8(); - // Signature algo - enum { anonymous(0), rsa(1), dsa(2), ecdsa(3), (255) } - const signatureAlgorithm = stream.getUint8(); - // Signature - opaque signature<0..2^16-1>; - const sigLength = stream.getUint16(); - const signature = stream.getBlock(sigLength); - // Check that we read the entire buffer - if (stream.position !== buf.length) { - throw new Error('SCT buffer length mismatch'); - } - return new SignedCertificateTimestamp({ - version, - logID, - timestamp, - extensions, - hashAlgorithm, - signatureAlgorithm, - signature, - }); - } -} -exports.SignedCertificateTimestamp = SignedCertificateTimestamp; diff --git a/node_modules/pacote/node_modules/@sigstore/core/package.json b/node_modules/pacote/node_modules/@sigstore/core/package.json deleted file mode 100644 index af5dd281ac90e..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/core/package.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "name": "@sigstore/core", - "version": "2.0.0", - "description": "Base library for Sigstore", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "scripts": { - "clean": "shx rm -rf dist *.tsbuildinfo", - "build": "tsc --build", - "test": "jest" - }, - "files": [ - "dist" - ], - "author": "bdehamer@github.com", - "license": "Apache-2.0", - "repository": { - "type": "git", - "url": "git+https://github.com/sigstore/sigstore-js.git" - }, - "bugs": { - "url": "https://github.com/sigstore/sigstore-js/issues" - }, - "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/core#readme", - "publishConfig": { - "provenance": true - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } -} diff --git a/node_modules/pacote/node_modules/@sigstore/sign/LICENSE b/node_modules/pacote/node_modules/@sigstore/sign/LICENSE deleted file mode 100644 index e9e7c1679a09d..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2023 The Sigstore Authors - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/base.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/base.js deleted file mode 100644 index 61d5eba4568a3..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/base.js +++ /dev/null @@ -1,50 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.BaseBundleBuilder = void 0; -// BaseBundleBuilder is a base class for BundleBuilder implementations. It -// provides a the basic wokflow for signing and witnessing an artifact. -// Subclasses must implement the `package` method to assemble a valid bundle -// with the generated signature and verification material. -class BaseBundleBuilder { - constructor(options) { - this.signer = options.signer; - this.witnesses = options.witnesses; - } - // Executes the signing/witnessing process for the given artifact. 
- async create(artifact) { - const signature = await this.prepare(artifact).then((blob) => this.signer.sign(blob)); - const bundle = await this.package(artifact, signature); - // Invoke all of the witnesses in parallel - const verificationMaterials = await Promise.all(this.witnesses.map((witness) => witness.testify(bundle.content, publicKey(signature.key)))); - // Collect the verification material from all of the witnesses - const tlogEntryList = []; - const timestampList = []; - verificationMaterials.forEach(({ tlogEntries, rfc3161Timestamps }) => { - tlogEntryList.push(...(tlogEntries ?? [])); - timestampList.push(...(rfc3161Timestamps ?? [])); - }); - // Merge the collected verification material into the bundle - bundle.verificationMaterial.tlogEntries = tlogEntryList; - bundle.verificationMaterial.timestampVerificationData = { - rfc3161Timestamps: timestampList, - }; - return bundle; - } - // Override this function to apply any pre-signing transformations to the - // artifact. The returned buffer will be signed by the signer. The default - // implementation simply returns the artifact data. - async prepare(artifact) { - return artifact.data; - } -} -exports.BaseBundleBuilder = BaseBundleBuilder; -// Extracts the public key from a KeyMaterial. Returns either the public key -// or the certificate, depending on the type of key material. -function publicKey(key) { - switch (key.$case) { - case 'publicKey': - return key.publicKey; - case 'x509Certificate': - return key.certificate; - } -} diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/bundle.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/bundle.js deleted file mode 100644 index ed32286ad88ef..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/bundle.js +++ /dev/null @@ -1,71 +0,0 @@ -"use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.toMessageSignatureBundle = toMessageSignatureBundle; -exports.toDSSEBundle = toDSSEBundle; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-See the License for the specific language governing permissions and -limitations under the License. -*/ -const sigstore = __importStar(require("@sigstore/bundle")); -const util_1 = require("../util"); -// Helper functions for assembling the parts of a Sigstore bundle -// Message signature bundle - $case: 'messageSignature' -function toMessageSignatureBundle(artifact, signature) { - const digest = util_1.crypto.digest('sha256', artifact.data); - return sigstore.toMessageSignatureBundle({ - digest, - signature: signature.signature, - certificate: signature.key.$case === 'x509Certificate' - ? util_1.pem.toDER(signature.key.certificate) - : undefined, - keyHint: signature.key.$case === 'publicKey' ? signature.key.hint : undefined, - certificateChain: true, - }); -} -// DSSE envelope bundle - $case: 'dsseEnvelope' -function toDSSEBundle(artifact, signature, certificateChain) { - return sigstore.toDSSEBundle({ - artifact: artifact.data, - artifactType: artifact.type, - signature: signature.signature, - certificate: signature.key.$case === 'x509Certificate' - ? util_1.pem.toDER(signature.key.certificate) - : undefined, - keyHint: signature.key.$case === 'publicKey' ? signature.key.hint : undefined, - certificateChain, - }); -} diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/dsse.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/dsse.js deleted file mode 100644 index 86046ba8f3013..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/dsse.js +++ /dev/null @@ -1,46 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.DSSEBundleBuilder = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const util_1 = require("../util"); -const base_1 = require("./base"); -const bundle_1 = require("./bundle"); -// BundleBuilder implementation for DSSE wrapped attestations -class DSSEBundleBuilder extends base_1.BaseBundleBuilder { - constructor(options) { - super(options); - this.certificateChain = options.certificateChain ?? false; - } - // DSSE requires the artifact to be pre-encoded with the payload type - // before the signature is generated. - async prepare(artifact) { - const a = artifactDefaults(artifact); - return util_1.dsse.preAuthEncoding(a.type, a.data); - } - // Packages the artifact and signature into a DSSE bundle - async package(artifact, signature) { - return (0, bundle_1.toDSSEBundle)(artifactDefaults(artifact), signature, this.certificateChain); - } -} -exports.DSSEBundleBuilder = DSSEBundleBuilder; -// Defaults the artifact type to an empty string if not provided -function artifactDefaults(artifact) { - return { - ...artifact, - type: artifact.type ?? 
'', - }; -} diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/index.js deleted file mode 100644 index d67c8c324a4f0..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/index.js +++ /dev/null @@ -1,7 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.MessageSignatureBundleBuilder = exports.DSSEBundleBuilder = void 0; -var dsse_1 = require("./dsse"); -Object.defineProperty(exports, "DSSEBundleBuilder", { enumerable: true, get: function () { return dsse_1.DSSEBundleBuilder; } }); -var message_1 = require("./message"); -Object.defineProperty(exports, "MessageSignatureBundleBuilder", { enumerable: true, get: function () { return message_1.MessageSignatureBundleBuilder; } }); diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/message.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/message.js deleted file mode 100644 index e3991f42bab93..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/message.js +++ /dev/null @@ -1,30 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.MessageSignatureBundleBuilder = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const base_1 = require("./base"); -const bundle_1 = require("./bundle"); -// BundleBuilder implementation for raw message signatures -class MessageSignatureBundleBuilder extends base_1.BaseBundleBuilder { - constructor(options) { - super(options); - } - async package(artifact, signature) { - return (0, bundle_1.toMessageSignatureBundle)(artifact, signature); - } -} -exports.MessageSignatureBundleBuilder = MessageSignatureBundleBuilder; diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/error.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/error.js deleted file mode 100644 index d28f1913cc77e..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/dist/error.js +++ /dev/null @@ -1,39 +0,0 @@ -"use strict"; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.InternalError = void 0; -exports.internalError = internalError; -const error_1 = require("./external/error"); -class InternalError extends Error { - constructor({ code, message, cause, }) { - super(message); - this.name = this.constructor.name; - this.cause = cause; - this.code = code; - } -} -exports.InternalError = InternalError; -function internalError(err, code, message) { - if (err instanceof error_1.HTTPError) { - message += ` - ${err.message}`; - } - throw new InternalError({ - code: code, - message: message, - cause: err, - }); -} diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/external/error.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/external/error.js deleted file mode 100644 index a6a65adebb176..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/dist/external/error.js +++ /dev/null @@ -1,26 +0,0 @@ -"use strict"; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.HTTPError = void 0; -class HTTPError extends Error { - constructor({ status, message, location, }) { - super(`(${status}) ${message}`); - this.statusCode = status; - this.location = location; - } -} -exports.HTTPError = HTTPError; diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/external/fetch.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/external/fetch.js deleted file mode 100644 index 116090f3c641e..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/dist/external/fetch.js +++ /dev/null @@ -1,98 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.fetchWithRetry = fetchWithRetry; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -const http2_1 = require("http2"); -const make_fetch_happen_1 = __importDefault(require("make-fetch-happen")); -const proc_log_1 = require("proc-log"); -const promise_retry_1 = __importDefault(require("promise-retry")); -const util_1 = require("../util"); -const error_1 = require("./error"); -const { HTTP2_HEADER_LOCATION, HTTP2_HEADER_CONTENT_TYPE, HTTP2_HEADER_USER_AGENT, HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_TOO_MANY_REQUESTS, HTTP_STATUS_REQUEST_TIMEOUT, } = http2_1.constants; -async function fetchWithRetry(url, options) { - return (0, promise_retry_1.default)(async (retry, attemptNum) => { - const method = options.method || 'POST'; - const headers = { - [HTTP2_HEADER_USER_AGENT]: util_1.ua.getUserAgent(), - ...options.headers, - }; - const response = await (0, make_fetch_happen_1.default)(url, { - method, - headers, - body: options.body, - timeout: options.timeout, - retry: false, // We're handling retries ourselves - }).catch((reason) => { - proc_log_1.log.http('fetch', `${method} ${url} attempt ${attemptNum} failed with ${reason}`); - return retry(reason); - }); - if (response.ok) { - return response; - } - else { - const error = await errorFromResponse(response); - proc_log_1.log.http('fetch', `${method} ${url} attempt ${attemptNum} failed with ${response.status}`); - if (retryable(response.status)) { - return retry(error); - } - else { - throw error; - } - } - }, retryOpts(options.retry)); -} -// Translate a Response into an HTTPError instance. This will attempt to parse -// the response body for a message, but will default to the statusText if none -// is found. -const errorFromResponse = async (response) => { - let message = response.statusText; - const location = response.headers.get(HTTP2_HEADER_LOCATION) || undefined; - const contentType = response.headers.get(HTTP2_HEADER_CONTENT_TYPE); - // If response type is JSON, try to parse the body for a message - if (contentType?.includes('application/json')) { - try { - const body = await response.json(); - message = body.message || message; - } - catch (e) { - // ignore - } - } - return new error_1.HTTPError({ - status: response.status, - message: message, - location: location, - }); -}; -// Determine if a status code is retryable. This includes 5xx errors, 408, and -// 429. -const retryable = (status) => [HTTP_STATUS_REQUEST_TIMEOUT, HTTP_STATUS_TOO_MANY_REQUESTS].includes(status) || status >= HTTP_STATUS_INTERNAL_SERVER_ERROR; -// Normalize the retry options to the format expected by promise-retry -const retryOpts = (retry) => { - if (typeof retry === 'boolean') { - return { retries: retry ? 1 : 0 }; - } - else if (typeof retry === 'number') { - return { retries: retry }; - } - else { - return { retries: 0, ...retry }; - } -}; diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/external/fulcio.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/external/fulcio.js deleted file mode 100644 index de6a1ad9f9e79..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/dist/external/fulcio.js +++ /dev/null @@ -1,41 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Fulcio = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const fetch_1 = require("./fetch"); -/** - * Fulcio API client. - */ -class Fulcio { - constructor(options) { - this.options = options; - } - async createSigningCertificate(request) { - const { baseURL, retry, timeout } = this.options; - const url = `${baseURL}/api/v2/signingCert`; - const response = await (0, fetch_1.fetchWithRetry)(url, { - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify(request), - timeout, - retry, - }); - return response.json(); - } -} -exports.Fulcio = Fulcio; diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/external/rekor.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/external/rekor.js deleted file mode 100644 index bb59a126e032f..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/dist/external/rekor.js +++ /dev/null @@ -1,80 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Rekor = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const fetch_1 = require("./fetch"); -/** - * Rekor API client. - */ -class Rekor { - constructor(options) { - this.options = options; - } - /** - * Create a new entry in the Rekor log. - * @param propsedEntry {ProposedEntry} Data to create a new entry - * @returns {Promise} The created entry - */ - async createEntry(propsedEntry) { - const { baseURL, timeout, retry } = this.options; - const url = `${baseURL}/api/v1/log/entries`; - const response = await (0, fetch_1.fetchWithRetry)(url, { - headers: { - 'Content-Type': 'application/json', - Accept: 'application/json', - }, - body: JSON.stringify(propsedEntry), - timeout, - retry, - }); - const data = await response.json(); - return entryFromResponse(data); - } - /** - * Get an entry from the Rekor log. - * @param uuid {string} The UUID of the entry to retrieve - * @returns {Promise} The retrieved entry - */ - async getEntry(uuid) { - const { baseURL, timeout, retry } = this.options; - const url = `${baseURL}/api/v1/log/entries/${uuid}`; - const response = await (0, fetch_1.fetchWithRetry)(url, { - method: 'GET', - headers: { - Accept: 'application/json', - }, - timeout, - retry, - }); - const data = await response.json(); - return entryFromResponse(data); - } -} -exports.Rekor = Rekor; -// Unpack the response from the Rekor API into a more convenient format. 
-function entryFromResponse(data) { - const entries = Object.entries(data); - if (entries.length != 1) { - throw new Error('Received multiple entries in Rekor response'); - } - // Grab UUID and entry data from the response - const [uuid, entry] = entries[0]; - return { - ...entry, - uuid, - }; -} diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/external/tsa.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/external/tsa.js deleted file mode 100644 index a948ba9cca2c7..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/dist/external/tsa.js +++ /dev/null @@ -1,38 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.TimestampAuthority = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const fetch_1 = require("./fetch"); -class TimestampAuthority { - constructor(options) { - this.options = options; - } - async createTimestamp(request) { - const { baseURL, timeout, retry } = this.options; - const url = `${baseURL}/api/v1/timestamp`; - const response = await (0, fetch_1.fetchWithRetry)(url, { - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify(request), - timeout, - retry, - }); - return response.buffer(); - } -} -exports.TimestampAuthority = TimestampAuthority; diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/ci.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/ci.js deleted file mode 100644 index d79133952b605..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/ci.js +++ /dev/null @@ -1,73 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.CIContextProvider = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const make_fetch_happen_1 = __importDefault(require("make-fetch-happen")); -// Collection of all the CI-specific providers we have implemented -const providers = [getGHAToken, getEnv]; -/** - * CIContextProvider is a composite identity provider which will iterate - * over all of the CI-specific providers and return the token from the first - * one that resolves. 
- */ -class CIContextProvider { - /* istanbul ignore next */ - constructor(audience = 'sigstore') { - this.audience = audience; - } - // Invoke all registered ProviderFuncs and return the value of whichever one - // resolves first. - async getToken() { - return Promise.any(providers.map((getToken) => getToken(this.audience))).catch(() => Promise.reject('CI: no tokens available')); - } -} -exports.CIContextProvider = CIContextProvider; -/** - * getGHAToken can retrieve an OIDC token when running in a GitHub Actions - * workflow - */ -async function getGHAToken(audience) { - // Check to see if we're running in GitHub Actions - if (!process.env.ACTIONS_ID_TOKEN_REQUEST_URL || - !process.env.ACTIONS_ID_TOKEN_REQUEST_TOKEN) { - return Promise.reject('no token available'); - } - // Construct URL to request token w/ appropriate audience - const url = new URL(process.env.ACTIONS_ID_TOKEN_REQUEST_URL); - url.searchParams.append('audience', audience); - const response = await (0, make_fetch_happen_1.default)(url.href, { - retry: 2, - headers: { - Accept: 'application/json', - Authorization: `Bearer ${process.env.ACTIONS_ID_TOKEN_REQUEST_TOKEN}`, - }, - }); - return response.json().then((data) => data.value); -} -/** - * getEnv can retrieve an OIDC token from an environment variable. - * This matches the behavior of https://github.com/sigstore/cosign/tree/main/pkg/providers/envvar - */ -async function getEnv() { - if (!process.env.SIGSTORE_ID_TOKEN) { - return Promise.reject('no token available'); - } - return process.env.SIGSTORE_ID_TOKEN; -} diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/index.js deleted file mode 100644 index 1c1223b443fab..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/index.js +++ /dev/null @@ -1,20 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.CIContextProvider = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -var ci_1 = require("./ci"); -Object.defineProperty(exports, "CIContextProvider", { enumerable: true, get: function () { return ci_1.CIContextProvider; } }); diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/provider.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/provider.js deleted file mode 100644 index c8ad2e549bdc6..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/provider.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/index.js deleted file mode 100644 index 383b76083361b..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/dist/index.js +++ /dev/null @@ -1,17 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.TSAWitness = exports.RekorWitness = exports.DEFAULT_REKOR_URL = exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = exports.CIContextProvider = exports.InternalError = exports.MessageSignatureBundleBuilder = exports.DSSEBundleBuilder = void 0; -var bundler_1 = require("./bundler"); -Object.defineProperty(exports, "DSSEBundleBuilder", { enumerable: true, get: function () { return bundler_1.DSSEBundleBuilder; } }); -Object.defineProperty(exports, "MessageSignatureBundleBuilder", { enumerable: true, get: function () { return bundler_1.MessageSignatureBundleBuilder; } }); -var error_1 = require("./error"); -Object.defineProperty(exports, "InternalError", { enumerable: true, get: function () { return error_1.InternalError; } }); -var identity_1 = require("./identity"); -Object.defineProperty(exports, "CIContextProvider", { enumerable: true, get: function () { return identity_1.CIContextProvider; } }); -var signer_1 = require("./signer"); -Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return signer_1.DEFAULT_FULCIO_URL; } }); -Object.defineProperty(exports, "FulcioSigner", { enumerable: true, get: function () { return signer_1.FulcioSigner; } }); -var witness_1 = require("./witness"); -Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return witness_1.DEFAULT_REKOR_URL; } }); -Object.defineProperty(exports, "RekorWitness", { enumerable: true, get: function () { return witness_1.RekorWitness; } }); -Object.defineProperty(exports, "TSAWitness", { enumerable: true, get: function () { return witness_1.TSAWitness; } }); diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js deleted file mode 100644 index f01703cfab564..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js +++ /dev/null @@ -1,59 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.CAClient = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-See the License for the specific language governing permissions and -limitations under the License. -*/ -const error_1 = require("../../error"); -const fulcio_1 = require("../../external/fulcio"); -class CAClient { - constructor(options) { - this.fulcio = new fulcio_1.Fulcio({ - baseURL: options.fulcioBaseURL, - retry: options.retry, - timeout: options.timeout, - }); - } - async createSigningCertificate(identityToken, publicKey, challenge) { - const request = toCertificateRequest(identityToken, publicKey, challenge); - try { - const resp = await this.fulcio.createSigningCertificate(request); - // Account for the fact that the response may contain either a - // signedCertificateEmbeddedSct or a signedCertificateDetachedSct. - const cert = resp.signedCertificateEmbeddedSct - ? resp.signedCertificateEmbeddedSct - : resp.signedCertificateDetachedSct; - return cert.chain.certificates; - } - catch (err) { - (0, error_1.internalError)(err, 'CA_CREATE_SIGNING_CERTIFICATE_ERROR', 'error creating signing certificate'); - } - } -} -exports.CAClient = CAClient; -function toCertificateRequest(identityToken, publicKey, challenge) { - return { - credentials: { - oidcIdentityToken: identityToken, - }, - publicKeyRequest: { - publicKey: { - algorithm: 'ECDSA', - content: publicKey, - }, - proofOfPossession: challenge.toString('base64'), - }, - }; -} diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js deleted file mode 100644 index 481aa5c3579a2..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js +++ /dev/null @@ -1,45 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.EphemeralSigner = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const crypto_1 = __importDefault(require("crypto")); -const EC_KEYPAIR_TYPE = 'ec'; -const P256_CURVE = 'P-256'; -// Signer implementation which uses an ephemeral keypair to sign artifacts. -// The private key lives only in memory and is tied to the lifetime of the -// EphemeralSigner instance. 
-class EphemeralSigner { - constructor() { - this.keypair = crypto_1.default.generateKeyPairSync(EC_KEYPAIR_TYPE, { - namedCurve: P256_CURVE, - }); - } - async sign(data) { - const signature = crypto_1.default.sign(null, data, this.keypair.privateKey); - const publicKey = this.keypair.publicKey - .export({ format: 'pem', type: 'spki' }) - .toString('ascii'); - return { - signature: signature, - key: { $case: 'publicKey', publicKey }, - }; - } -} -exports.EphemeralSigner = EphemeralSigner; diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/index.js deleted file mode 100644 index 89a432548d2b4..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/index.js +++ /dev/null @@ -1,87 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const error_1 = require("../../error"); -const util_1 = require("../../util"); -const ca_1 = require("./ca"); -const ephemeral_1 = require("./ephemeral"); -exports.DEFAULT_FULCIO_URL = 'https://fulcio.sigstore.dev'; -// Signer implementation which can be used to decorate another signer -// with a Fulcio-issued signing certificate for the signer's public key. -// Must be instantiated with an identity provider which can provide a JWT -// which represents the identity to be bound to the signing certificate. -class FulcioSigner { - constructor(options) { - this.ca = new ca_1.CAClient({ - ...options, - fulcioBaseURL: options.fulcioBaseURL || /* istanbul ignore next */ exports.DEFAULT_FULCIO_URL, - }); - this.identityProvider = options.identityProvider; - this.keyHolder = options.keyHolder || new ephemeral_1.EphemeralSigner(); - } - async sign(data) { - // Retrieve identity token from the supplied identity provider - const identityToken = await this.getIdentityToken(); - // Extract challenge claim from OIDC token - let subject; - try { - subject = util_1.oidc.extractJWTSubject(identityToken); - } - catch (err) { - throw new error_1.InternalError({ - code: 'IDENTITY_TOKEN_PARSE_ERROR', - message: `invalid identity token: ${identityToken}`, - cause: err, - }); - } - // Construct challenge value by signing the subject claim - const challenge = await this.keyHolder.sign(Buffer.from(subject)); - if (challenge.key.$case !== 'publicKey') { - throw new error_1.InternalError({ - code: 'CA_CREATE_SIGNING_CERTIFICATE_ERROR', - message: 'unexpected format for signing key', - }); - } - // Create signing certificate - const certificates = await this.ca.createSigningCertificate(identityToken, challenge.key.publicKey, challenge.signature); - // Generate artifact signature - const signature = await this.keyHolder.sign(data); - // Specifically returning only the first certificate in the chain - // as the key. 
- return { - signature: signature.signature, - key: { - $case: 'x509Certificate', - certificate: certificates[0], - }, - }; - } - async getIdentityToken() { - try { - return await this.identityProvider.getToken(); - } - catch (err) { - throw new error_1.InternalError({ - code: 'IDENTITY_TOKEN_READ_ERROR', - message: 'error retrieving identity token', - cause: err, - }); - } - } -} -exports.FulcioSigner = FulcioSigner; diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/index.js deleted file mode 100644 index e2087767b81c1..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/index.js +++ /dev/null @@ -1,22 +0,0 @@ -"use strict"; -/* istanbul ignore file */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -var fulcio_1 = require("./fulcio"); -Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return fulcio_1.DEFAULT_FULCIO_URL; } }); -Object.defineProperty(exports, "FulcioSigner", { enumerable: true, get: function () { return fulcio_1.FulcioSigner; } }); diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/signer.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/signer.js deleted file mode 100644 index b92c54183375d..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/signer.js +++ /dev/null @@ -1,17 +0,0 @@ -"use strict"; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/types/fetch.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/types/fetch.js deleted file mode 100644 index c8ad2e549bdc6..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/dist/types/fetch.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/util/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/util/index.js deleted file mode 100644 index f467c9150c348..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/dist/util/index.js +++ /dev/null @@ -1,49 +0,0 @@ -"use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.ua = exports.oidc = exports.pem = exports.json = exports.encoding = exports.dsse = exports.crypto = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -var core_1 = require("@sigstore/core"); -Object.defineProperty(exports, "crypto", { enumerable: true, get: function () { return core_1.crypto; } }); -Object.defineProperty(exports, "dsse", { enumerable: true, get: function () { return core_1.dsse; } }); -Object.defineProperty(exports, "encoding", { enumerable: true, get: function () { return core_1.encoding; } }); -Object.defineProperty(exports, "json", { enumerable: true, get: function () { return core_1.json; } }); -Object.defineProperty(exports, "pem", { enumerable: true, get: function () { return core_1.pem; } }); -exports.oidc = __importStar(require("./oidc")); -exports.ua = __importStar(require("./ua")); diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/util/oidc.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/util/oidc.js deleted file mode 100644 index 37c5b168ee12e..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/dist/util/oidc.js +++ /dev/null @@ -1,30 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.extractJWTSubject = extractJWTSubject; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const core_1 = require("@sigstore/core"); -function extractJWTSubject(jwt) { - const parts = jwt.split('.', 3); - const payload = JSON.parse(core_1.encoding.base64Decode(parts[1])); - switch (payload.iss) { - case 'https://accounts.google.com': - case 'https://oauth2.sigstore.dev/auth': - return payload.email; - default: - return payload.sub; - } -} diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/util/ua.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/util/ua.js deleted file mode 100644 index b15ff2070fb9f..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/dist/util/ua.js +++ /dev/null @@ -1,32 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.getUserAgent = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -const os_1 = __importDefault(require("os")); -// Format User-Agent: / () -// source: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/User-Agent -const getUserAgent = () => { - const packageVersion = require('../../package.json').version; - const nodeVersion = process.version; - const platformName = os_1.default.platform(); - const archName = os_1.default.arch(); - return `sigstore-js/${packageVersion} (Node ${nodeVersion}) (${platformName}/${archName})`; -}; -exports.getUserAgent = getUserAgent; diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/index.js deleted file mode 100644 index 72677c399caa7..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/index.js +++ /dev/null @@ -1,24 +0,0 @@ -"use strict"; -/* istanbul ignore file */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.TSAWitness = exports.RekorWitness = exports.DEFAULT_REKOR_URL = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -var tlog_1 = require("./tlog"); -Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return tlog_1.DEFAULT_REKOR_URL; } }); -Object.defineProperty(exports, "RekorWitness", { enumerable: true, get: function () { return tlog_1.RekorWitness; } }); -var tsa_1 = require("./tsa"); -Object.defineProperty(exports, "TSAWitness", { enumerable: true, get: function () { return tsa_1.TSAWitness; } }); diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/client.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/client.js deleted file mode 100644 index 22c895f2ca7ed..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/client.js +++ /dev/null @@ -1,61 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.TLogClient = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const error_1 = require("../../error"); -const error_2 = require("../../external/error"); -const rekor_1 = require("../../external/rekor"); -class TLogClient { - constructor(options) { - this.fetchOnConflict = options.fetchOnConflict ?? 
false; - this.rekor = new rekor_1.Rekor({ - baseURL: options.rekorBaseURL, - retry: options.retry, - timeout: options.timeout, - }); - } - async createEntry(proposedEntry) { - let entry; - try { - entry = await this.rekor.createEntry(proposedEntry); - } - catch (err) { - // If the entry already exists, fetch it (if enabled) - if (entryExistsError(err) && this.fetchOnConflict) { - // Grab the UUID of the existing entry from the location header - /* istanbul ignore next */ - const uuid = err.location.split('/').pop() || ''; - try { - entry = await this.rekor.getEntry(uuid); - } - catch (err) { - (0, error_1.internalError)(err, 'TLOG_FETCH_ENTRY_ERROR', 'error fetching tlog entry'); - } - } - else { - (0, error_1.internalError)(err, 'TLOG_CREATE_ENTRY_ERROR', 'error creating tlog entry'); - } - } - return entry; - } -} -exports.TLogClient = TLogClient; -function entryExistsError(value) { - return (value instanceof error_2.HTTPError && - value.statusCode === 409 && - value.location !== undefined); -} diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/entry.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/entry.js deleted file mode 100644 index bb1c68e914b90..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/entry.js +++ /dev/null @@ -1,140 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.toProposedEntry = toProposedEntry; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -const bundle_1 = require("@sigstore/bundle"); -const util_1 = require("../../util"); -const SHA256_ALGORITHM = 'sha256'; -function toProposedEntry(content, publicKey, -// TODO: Remove this parameter once have completely switched to 'dsse' entries -entryType = 'dsse') { - switch (content.$case) { - case 'dsseEnvelope': - // TODO: Remove this conditional once have completely ditched "intoto" entries - if (entryType === 'intoto') { - return toProposedIntotoEntry(content.dsseEnvelope, publicKey); - } - return toProposedDSSEEntry(content.dsseEnvelope, publicKey); - case 'messageSignature': - return toProposedHashedRekordEntry(content.messageSignature, publicKey); - } -} -// Returns a properly formatted Rekor "hashedrekord" entry for the given digest -// and signature -function toProposedHashedRekordEntry(messageSignature, publicKey) { - const hexDigest = messageSignature.messageDigest.digest.toString('hex'); - const b64Signature = messageSignature.signature.toString('base64'); - const b64Key = util_1.encoding.base64Encode(publicKey); - return { - apiVersion: '0.0.1', - kind: 'hashedrekord', - spec: { - data: { - hash: { - algorithm: SHA256_ALGORITHM, - value: hexDigest, - }, - }, - signature: { - content: b64Signature, - publicKey: { - content: b64Key, - }, - }, - }, - }; -} -// Returns a properly formatted Rekor "dsse" entry for the given DSSE envelope -// and signature -function toProposedDSSEEntry(envelope, publicKey) { - const envelopeJSON = JSON.stringify((0, bundle_1.envelopeToJSON)(envelope)); - const encodedKey = util_1.encoding.base64Encode(publicKey); - return { - apiVersion: '0.0.1', - kind: 'dsse', - spec: { - proposedContent: { - envelope: envelopeJSON, - verifiers: [encodedKey], - }, - }, - }; -} -// Returns a properly formatted Rekor "intoto" entry for the given DSSE -// envelope and signature -function toProposedIntotoEntry(envelope, publicKey) { - // Calculate the value for the payloadHash field in the Rekor entry - const payloadHash = util_1.crypto - .digest(SHA256_ALGORITHM, envelope.payload) - .toString('hex'); - // Calculate the value for the hash field in the Rekor entry - const envelopeHash = calculateDSSEHash(envelope, publicKey); - // Collect values for re-creating the DSSE envelope. - // Double-encode payload and signature cause that's what Rekor expects - const payload = util_1.encoding.base64Encode(envelope.payload.toString('base64')); - const sig = util_1.encoding.base64Encode(envelope.signatures[0].sig.toString('base64')); - const keyid = envelope.signatures[0].keyid; - const encodedKey = util_1.encoding.base64Encode(publicKey); - // Create the envelope portion of the entry. Note the inclusion of the - // publicKey in the signature struct is not a standard part of a DSSE - // envelope, but is required by Rekor. - const dsse = { - payloadType: envelope.payloadType, - payload: payload, - signatures: [{ sig, publicKey: encodedKey }], - }; - // If the keyid is an empty string, Rekor seems to remove it altogether. We - // need to do the same here so that we can properly recreate the entry for - // verification. - if (keyid.length > 0) { - dsse.signatures[0].keyid = keyid; - } - return { - apiVersion: '0.0.2', - kind: 'intoto', - spec: { - content: { - envelope: dsse, - hash: { algorithm: SHA256_ALGORITHM, value: envelopeHash }, - payloadHash: { algorithm: SHA256_ALGORITHM, value: payloadHash }, - }, - }, - }; -} -// Calculates the hash of a DSSE envelope for inclusion in a Rekor entry. 
-// There is no standard way to do this, so the scheme we're using as as -// follows: -// * payload is base64 encoded -// * signature is base64 encoded (only the first signature is used) -// * keyid is included ONLY if it is NOT an empty string -// * The resulting JSON is canonicalized and hashed to a hex string -function calculateDSSEHash(envelope, publicKey) { - const dsse = { - payloadType: envelope.payloadType, - payload: envelope.payload.toString('base64'), - signatures: [ - { sig: envelope.signatures[0].sig.toString('base64'), publicKey }, - ], - }; - // If the keyid is an empty string, Rekor seems to remove it altogether. - if (envelope.signatures[0].keyid.length > 0) { - dsse.signatures[0].keyid = envelope.signatures[0].keyid; - } - return util_1.crypto - .digest(SHA256_ALGORITHM, util_1.json.canonicalize(dsse)) - .toString('hex'); -} diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/index.js deleted file mode 100644 index 6197b09d4cdd9..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/index.js +++ /dev/null @@ -1,82 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.RekorWitness = exports.DEFAULT_REKOR_URL = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const util_1 = require("../../util"); -const client_1 = require("./client"); -const entry_1 = require("./entry"); -exports.DEFAULT_REKOR_URL = 'https://rekor.sigstore.dev'; -class RekorWitness { - constructor(options) { - this.entryType = options.entryType; - this.tlog = new client_1.TLogClient({ - ...options, - rekorBaseURL: options.rekorBaseURL || /* istanbul ignore next */ exports.DEFAULT_REKOR_URL, - }); - } - async testify(content, publicKey) { - const proposedEntry = (0, entry_1.toProposedEntry)(content, publicKey, this.entryType); - const entry = await this.tlog.createEntry(proposedEntry); - return toTransparencyLogEntry(entry); - } -} -exports.RekorWitness = RekorWitness; -function toTransparencyLogEntry(entry) { - const logID = Buffer.from(entry.logID, 'hex'); - // Parse entry body so we can extract the kind and version. - const bodyJSON = util_1.encoding.base64Decode(entry.body); - const entryBody = JSON.parse(bodyJSON); - const promise = entry?.verification?.signedEntryTimestamp - ? inclusionPromise(entry.verification.signedEntryTimestamp) - : undefined; - const proof = entry?.verification?.inclusionProof - ? 
inclusionProof(entry.verification.inclusionProof) - : undefined; - const tlogEntry = { - logIndex: entry.logIndex.toString(), - logId: { - keyId: logID, - }, - integratedTime: entry.integratedTime.toString(), - kindVersion: { - kind: entryBody.kind, - version: entryBody.apiVersion, - }, - inclusionPromise: promise, - inclusionProof: proof, - canonicalizedBody: Buffer.from(entry.body, 'base64'), - }; - return { - tlogEntries: [tlogEntry], - }; -} -function inclusionPromise(promise) { - return { - signedEntryTimestamp: Buffer.from(promise, 'base64'), - }; -} -function inclusionProof(proof) { - return { - logIndex: proof.logIndex.toString(), - treeSize: proof.treeSize.toString(), - rootHash: Buffer.from(proof.rootHash, 'hex'), - hashes: proof.hashes.map((h) => Buffer.from(h, 'hex')), - checkpoint: { - envelope: proof.checkpoint, - }, - }; -} diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tsa/client.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tsa/client.js deleted file mode 100644 index 754de3748dbb3..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tsa/client.js +++ /dev/null @@ -1,46 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.TSAClient = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const error_1 = require("../../error"); -const tsa_1 = require("../../external/tsa"); -const util_1 = require("../../util"); -const SHA256_ALGORITHM = 'sha256'; -class TSAClient { - constructor(options) { - this.tsa = new tsa_1.TimestampAuthority({ - baseURL: options.tsaBaseURL, - retry: options.retry, - timeout: options.timeout, - }); - } - async createTimestamp(signature) { - const request = { - artifactHash: util_1.crypto - .digest(SHA256_ALGORITHM, signature) - .toString('base64'), - hashAlgorithm: SHA256_ALGORITHM, - }; - try { - return await this.tsa.createTimestamp(request); - } - catch (err) { - (0, error_1.internalError)(err, 'TSA_CREATE_TIMESTAMP_ERROR', 'error creating timestamp'); - } - } -} -exports.TSAClient = TSAClient; diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tsa/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tsa/index.js deleted file mode 100644 index d4f5c7c859d10..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tsa/index.js +++ /dev/null @@ -1,44 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.TSAWitness = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-See the License for the specific language governing permissions and -limitations under the License. -*/ -const client_1 = require("./client"); -class TSAWitness { - constructor(options) { - this.tsa = new client_1.TSAClient({ - tsaBaseURL: options.tsaBaseURL, - retry: options.retry, - timeout: options.timeout, - }); - } - async testify(content) { - const signature = extractSignature(content); - const timestamp = await this.tsa.createTimestamp(signature); - return { - rfc3161Timestamps: [{ signedTimestamp: timestamp }], - }; - } -} -exports.TSAWitness = TSAWitness; -function extractSignature(content) { - switch (content.$case) { - case 'dsseEnvelope': - return content.dsseEnvelope.signatures[0].sig; - case 'messageSignature': - return content.messageSignature.signature; - } -} diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/witness.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/witness.js deleted file mode 100644 index c8ad2e549bdc6..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/witness.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/pacote/node_modules/@sigstore/sign/package.json b/node_modules/pacote/node_modules/@sigstore/sign/package.json deleted file mode 100644 index fe05e8dc2d73a..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/sign/package.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "name": "@sigstore/sign", - "version": "3.0.0", - "description": "Sigstore signing library", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "scripts": { - "clean": "shx rm -rf dist *.tsbuildinfo", - "build": "tsc --build", - "test": "jest" - }, - "files": [ - "dist" - ], - "author": "bdehamer@github.com", - "license": "Apache-2.0", - "repository": { - "type": "git", - "url": "git+https://github.com/sigstore/sigstore-js.git" - }, - "bugs": { - "url": "https://github.com/sigstore/sigstore-js/issues" - }, - "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/sign#readme", - "publishConfig": { - "provenance": true - }, - "devDependencies": { - "@sigstore/jest": "^0.0.0", - "@sigstore/mock": "^0.8.0", - "@sigstore/rekor-types": "^3.0.0", - "@types/make-fetch-happen": "^10.0.4", - "@types/promise-retry": "^1.1.6" - }, - "dependencies": { - "@sigstore/bundle": "^3.0.0", - "@sigstore/core": "^2.0.0", - "@sigstore/protobuf-specs": "^0.3.2", - "make-fetch-happen": "^14.0.1", - "proc-log": "^5.0.0", - "promise-retry": "^2.0.1" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } -} diff --git a/node_modules/pacote/node_modules/@sigstore/tuf/LICENSE b/node_modules/pacote/node_modules/@sigstore/tuf/LICENSE deleted file mode 100644 index e9e7c1679a09d..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/tuf/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2023 The Sigstore Authors - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/node_modules/pacote/node_modules/@sigstore/tuf/dist/appdata.js b/node_modules/pacote/node_modules/@sigstore/tuf/dist/appdata.js deleted file mode 100644 index 06a8143e70da2..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/tuf/dist/appdata.js +++ /dev/null @@ -1,43 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.appDataPath = appDataPath; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const os_1 = __importDefault(require("os")); -const path_1 = __importDefault(require("path")); -function appDataPath(name) { - const homedir = os_1.default.homedir(); - switch (process.platform) { - /* istanbul ignore next */ - case 'darwin': { - const appSupport = path_1.default.join(homedir, 'Library', 'Application Support'); - return path_1.default.join(appSupport, name); - } - /* istanbul ignore next */ - case 'win32': { - const localAppData = process.env.LOCALAPPDATA || path_1.default.join(homedir, 'AppData', 'Local'); - return path_1.default.join(localAppData, name, 'Data'); - } - /* istanbul ignore next */ - default: { - const localData = process.env.XDG_DATA_HOME || path_1.default.join(homedir, '.local', 'share'); - return path_1.default.join(localData, name); - } - } -} diff --git a/node_modules/pacote/node_modules/@sigstore/tuf/dist/client.js b/node_modules/pacote/node_modules/@sigstore/tuf/dist/client.js deleted file mode 100644 index 328f49e40dbbd..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/tuf/dist/client.js +++ /dev/null @@ -1,111 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.TUFClient = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -const tuf_js_1 = require("tuf-js"); -const _1 = require("."); -const target_1 = require("./target"); -const TARGETS_DIR_NAME = 'targets'; -class TUFClient { - constructor(options) { - const url = new URL(options.mirrorURL); - const repoName = encodeURIComponent(url.host + url.pathname.replace(/\/$/, '')); - const cachePath = path_1.default.join(options.cachePath, repoName); - initTufCache(cachePath); - seedCache({ - cachePath, - mirrorURL: options.mirrorURL, - tufRootPath: options.rootPath, - forceInit: options.forceInit, - }); - this.updater = initClient({ - mirrorURL: options.mirrorURL, - cachePath, - forceCache: options.forceCache, - retry: options.retry, - timeout: options.timeout, - }); - } - async refresh() { - return this.updater.refresh(); - } - getTarget(targetName) { - return (0, target_1.readTarget)(this.updater, targetName); - } -} -exports.TUFClient = TUFClient; -// Initializes the TUF cache directory structure including the initial -// root.json file. If the cache directory does not exist, it will be -// created. If the targets directory does not exist, it will be created. -// If the root.json file does not exist, it will be copied from the -// rootPath argument. -function initTufCache(cachePath) { - const targetsPath = path_1.default.join(cachePath, TARGETS_DIR_NAME); - if (!fs_1.default.existsSync(cachePath)) { - fs_1.default.mkdirSync(cachePath, { recursive: true }); - } - if (!fs_1.default.existsSync(targetsPath)) { - fs_1.default.mkdirSync(targetsPath); - } -} -// Populates the TUF cache with the initial root.json file. If the root.json -// file does not exist (or we're forcing re-initialization), copy it from either -// the rootPath argument or from one of the repo seeds. -function seedCache({ cachePath, mirrorURL, tufRootPath, forceInit, }) { - const cachedRootPath = path_1.default.join(cachePath, 'root.json'); - // If the root.json file does not exist (or we're forcing re-initialization), - // populate it either from the supplied rootPath or from one of the repo seeds. 
- if (!fs_1.default.existsSync(cachedRootPath) || forceInit) { - if (tufRootPath) { - fs_1.default.copyFileSync(tufRootPath, cachedRootPath); - } - else { - const seeds = require('../seeds.json'); - const repoSeed = seeds[mirrorURL]; - if (!repoSeed) { - throw new _1.TUFError({ - code: 'TUF_INIT_CACHE_ERROR', - message: `No root.json found for mirror: ${mirrorURL}`, - }); - } - fs_1.default.writeFileSync(cachedRootPath, Buffer.from(repoSeed['root.json'], 'base64')); - // Copy any seed targets into the cache - Object.entries(repoSeed.targets).forEach(([targetName, target]) => { - fs_1.default.writeFileSync(path_1.default.join(cachePath, TARGETS_DIR_NAME, targetName), Buffer.from(target, 'base64')); - }); - } - } -} -function initClient(options) { - const config = { - fetchTimeout: options.timeout, - fetchRetry: options.retry, - }; - return new tuf_js_1.Updater({ - metadataBaseUrl: options.mirrorURL, - targetBaseUrl: `${options.mirrorURL}/targets`, - metadataDir: options.cachePath, - targetDir: path_1.default.join(options.cachePath, TARGETS_DIR_NAME), - forceCache: options.forceCache, - config, - }); -} diff --git a/node_modules/pacote/node_modules/@sigstore/tuf/dist/error.js b/node_modules/pacote/node_modules/@sigstore/tuf/dist/error.js deleted file mode 100644 index e13971b289ff2..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/tuf/dist/error.js +++ /dev/null @@ -1,12 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.TUFError = void 0; -class TUFError extends Error { - constructor({ code, message, cause, }) { - super(message); - this.code = code; - this.cause = cause; - this.name = this.constructor.name; - } -} -exports.TUFError = TUFError; diff --git a/node_modules/pacote/node_modules/@sigstore/tuf/dist/index.js b/node_modules/pacote/node_modules/@sigstore/tuf/dist/index.js deleted file mode 100644 index 2af5de93ec5d2..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/tuf/dist/index.js +++ /dev/null @@ -1,56 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.TUFError = exports.DEFAULT_MIRROR_URL = void 0; -exports.getTrustedRoot = getTrustedRoot; -exports.initTUF = initTUF; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -const protobuf_specs_1 = require("@sigstore/protobuf-specs"); -const appdata_1 = require("./appdata"); -const client_1 = require("./client"); -exports.DEFAULT_MIRROR_URL = 'https://tuf-repo-cdn.sigstore.dev'; -const DEFAULT_CACHE_DIR = 'sigstore-js'; -const DEFAULT_RETRY = { retries: 2 }; -const DEFAULT_TIMEOUT = 5000; -const TRUSTED_ROOT_TARGET = 'trusted_root.json'; -async function getTrustedRoot( -/* istanbul ignore next */ -options = {}) { - const client = createClient(options); - const trustedRoot = await client.getTarget(TRUSTED_ROOT_TARGET); - return protobuf_specs_1.TrustedRoot.fromJSON(JSON.parse(trustedRoot)); -} -async function initTUF( -/* istanbul ignore next */ -options = {}) { - const client = createClient(options); - return client.refresh().then(() => client); -} -// Create a TUF client with default options -function createClient(options) { - /* istanbul ignore next */ - return new client_1.TUFClient({ - cachePath: options.cachePath || (0, appdata_1.appDataPath)(DEFAULT_CACHE_DIR), - rootPath: options.rootPath, - mirrorURL: options.mirrorURL || exports.DEFAULT_MIRROR_URL, - retry: options.retry ?? DEFAULT_RETRY, - timeout: options.timeout ?? DEFAULT_TIMEOUT, - forceCache: options.forceCache ?? false, - forceInit: options.forceInit ?? options.force ?? false, - }); -} -var error_1 = require("./error"); -Object.defineProperty(exports, "TUFError", { enumerable: true, get: function () { return error_1.TUFError; } }); diff --git a/node_modules/pacote/node_modules/@sigstore/tuf/dist/target.js b/node_modules/pacote/node_modules/@sigstore/tuf/dist/target.js deleted file mode 100644 index 5c6675bdfbf5f..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/tuf/dist/target.js +++ /dev/null @@ -1,79 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.readTarget = readTarget; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const fs_1 = __importDefault(require("fs")); -const error_1 = require("./error"); -// Downloads and returns the specified target from the provided TUF Updater. -async function readTarget(tuf, targetPath) { - const path = await getTargetPath(tuf, targetPath); - return new Promise((resolve, reject) => { - fs_1.default.readFile(path, 'utf-8', (err, data) => { - if (err) { - reject(new error_1.TUFError({ - code: 'TUF_READ_TARGET_ERROR', - message: `error reading target ${path}`, - cause: err, - })); - } - else { - resolve(data); - } - }); - }); -} -// Returns the local path to the specified target. If the target is not yet -// cached locally, the provided TUF Updater will be used to download and -// cache the target. 
-async function getTargetPath(tuf, target) { - let targetInfo; - try { - targetInfo = await tuf.getTargetInfo(target); - } - catch (err) { - throw new error_1.TUFError({ - code: 'TUF_REFRESH_METADATA_ERROR', - message: 'error refreshing TUF metadata', - cause: err, - }); - } - if (!targetInfo) { - throw new error_1.TUFError({ - code: 'TUF_FIND_TARGET_ERROR', - message: `target ${target} not found`, - }); - } - let path = await tuf.findCachedTarget(targetInfo); - // An empty path here means the target has not been cached locally, or is - // out of date. In either case, we need to download it. - if (!path) { - try { - path = await tuf.downloadTarget(targetInfo); - } - catch (err) { - throw new error_1.TUFError({ - code: 'TUF_DOWNLOAD_TARGET_ERROR', - message: `error downloading target ${path}`, - cause: err, - }); - } - } - return path; -} diff --git a/node_modules/pacote/node_modules/@sigstore/tuf/package.json b/node_modules/pacote/node_modules/@sigstore/tuf/package.json deleted file mode 100644 index 808689dfddf92..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/tuf/package.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "name": "@sigstore/tuf", - "version": "3.0.0", - "description": "Client for the Sigstore TUF repository", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "scripts": { - "clean": "shx rm -rf dist *.tsbuildinfo", - "build": "tsc --build", - "test": "jest" - }, - "files": [ - "dist", - "seeds.json" - ], - "author": "bdehamer@github.com", - "license": "Apache-2.0", - "repository": { - "type": "git", - "url": "git+https://github.com/sigstore/sigstore-js.git" - }, - "bugs": { - "url": "https://github.com/sigstore/sigstore-js/issues" - }, - "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/tuf#readme", - "publishConfig": { - "provenance": true - }, - "devDependencies": { - "@sigstore/jest": "^0.0.0", - "@tufjs/repo-mock": "^3.0.1", - "@types/make-fetch-happen": "^10.0.4" - }, - "dependencies": { - "@sigstore/protobuf-specs": "^0.3.2", - "tuf-js": "^3.0.1" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } -} diff --git a/node_modules/pacote/node_modules/@sigstore/tuf/seeds.json b/node_modules/pacote/node_modules/@sigstore/tuf/seeds.json deleted file mode 100644 index d1d3c6b5c4604..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/tuf/seeds.json +++ /dev/null @@ -1 +0,0 @@ -{"https://tuf-repo-cdn.sigstore.dev":{"root.json":"{
 "signatures": [
  {
   "keyid": "6f260089d5923daf20166ca657c543af618346ab971884a99962b01988bbe0c3",
   "sig": "30460221008ab1f6f17d4f9e6d7dcf1c88912b6b53cc10388644ae1f09bc37a082cd06003e022100e145ef4c7b782d4e8107b53437e669d0476892ce999903ae33d14448366996e7"
  },
  {
   "keyid": "e71a54d543835ba86adad9460379c7641fb8726d164ea766801a1c522aba7ea2",
   "sig": "3045022100c768b2f86da99569019c160a081da54ae36c34c0a3120d3cb69b53b7d113758e02204f671518f617b20d46537fae6c3b63bae8913f4f1962156105cc4f019ac35c6a"
  },
  {
   "keyid": "22f4caec6d8e6f9555af66b3d4c3cb06a3bb23fdc7e39c916c61f462e6f52b06",
   "sig": "3045022100b4434e6995d368d23e74759acd0cb9013c83a5d3511f0f997ec54c456ae4350a022015b0e265d182d2b61dc74e155d98b3c3fbe564ba05286aa14c8df02c9b756516"
  },
  {
   "keyid": "61643838125b440b40db6942f5cb5a31c0dc04368316eb2aaa58b95904a58222",
   "sig": "304502210082c58411d989eb9f861410857d42381590ec9424dbdaa51e78ed13515431904e0220118185da6a6c2947131c17797e2bb7620ce26e5f301d1ceac5f2a7e58f9dcf2e"
  },
  {
   "keyid": "a687e5bf4fab82b0ee58d46e05c9535145a2c9afb458f43d42b45ca0fdce2a70",
   "sig": "3046022100c78513854cae9c32eaa6b88e18912f48006c2757a258f917312caba75948eb9e022100d9e1b4ce0adfe9fd2e2148d7fa27a2f40ba1122bd69da7612d8d1776b013c91d"
  },
  {
   "keyid": "fdfa83a07b5a83589b87ded41f77f39d232ad91f7cce52868dacd06ba089849f",
   "sig": "3045022056483a2d5d9ea9cec6e11eadfb33c484b614298faca15acf1c431b11ed7f734c022100d0c1d726af92a87e4e66459ca5adf38a05b44e1f94318423f954bae8bca5bb2e"
  },
  {
   "keyid": "e2f59acb9488519407e18cbfc9329510be03c04aca9929d2f0301343fec85523",
   "sig": "3046022100d004de88024c32dc5653a9f4843cfc5215427048ad9600d2cf9c969e6edff3d2022100d9ebb798f5fc66af10899dece014a8628ccf3c5402cd4a4270207472f8f6e712"
  },
  {
   "keyid": "3c344aa068fd4cc4e87dc50b612c02431fbc771e95003993683a2b0bf260cf0e",
   "sig": "3046022100b7b09996c45ca2d4b05603e56baefa29718a0b71147cf8c6e66349baa61477df022100c4da80c717b4fa7bba0fd5c72da8a0499358b01358b2309f41d1456ea1e7e1d9"
  },
  {
   "keyid": "ec81669734e017996c5b85f3d02c3de1dd4637a152019fe1af125d2f9368b95e",
   "sig": "3046022100be9782c30744e411a82fa85b5138d601ce148bc19258aec64e7ec24478f38812022100caef63dcaf1a4b9a500d3bd0e3f164ec18f1b63d7a9460d9acab1066db0f016d"
  },
  {
   "keyid": "1e1d65ce98b10addad4764febf7dda2d0436b3d3a3893579c0dddaea20e54849",
   "sig": "30450220746ec3f8534ce55531d0d01ff64964ef440d1e7d2c4c142409b8e9769f1ada6f022100e3b929fcd93ea18feaa0825887a7210489879a66780c07a83f4bd46e2f09ab3b"
  }
 ],
 "signed": {
  "_type": "root",
  "consistent_snapshot": true,
  "expires": "2025-02-19T08:04:32Z",
  "keys": {
   "22f4caec6d8e6f9555af66b3d4c3cb06a3bb23fdc7e39c916c61f462e6f52b06": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEzBzVOmHCPojMVLSI364WiiV8NPrD\n6IgRxVliskz/v+y3JER5mcVGcONliDcWMC5J2lfHmjPNPhb4H7xm8LzfSA==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@santiagotorres"
   },
   "61643838125b440b40db6942f5cb5a31c0dc04368316eb2aaa58b95904a58222": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEinikSsAQmYkNeH5eYq/CnIzLaacO\nxlSaawQDOwqKy/tCqxq5xxPSJc21K4WIhs9GyOkKfzueY3GILzcMJZ4cWw==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@bobcallaway"
   },
   "6f260089d5923daf20166ca657c543af618346ab971884a99962b01988bbe0c3": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEy8XKsmhBYDI8Jc0GwzBxeKax0cm5\nSTKEU65HPFunUn41sT8pi0FjM4IkHz/YUmwmLUO0Wt7lxhj6BkLIK4qYAw==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@dlorenc"
   },
   "7247f0dbad85b147e1863bade761243cc785dcb7aa410e7105dd3d2b61a36d2c": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEWRiGr5+j+3J5SsH+Ztr5nE2H2wO7\nBV+nO3s93gLca18qTOzHY1oWyAGDykMSsGTUBSt9D+An0KfKsD2mfSM42Q==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-online-uri": "gcpkms://projects/sigstore-root-signing/locations/global/keyRings/root/cryptoKeys/timestamp"
   },
   "a687e5bf4fab82b0ee58d46e05c9535145a2c9afb458f43d42b45ca0fdce2a70": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE0ghrh92Lw1Yr3idGV5WqCtMDB8Cx\n+D8hdC4w2ZLNIplVRoVGLskYa3gheMyOjiJ8kPi15aQ2//7P+oj7UvJPGw==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@joshuagl"
   },
   "e71a54d543835ba86adad9460379c7641fb8726d164ea766801a1c522aba7ea2": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEEXsz3SZXFb8jMV42j6pJlyjbjR8K\nN3Bwocexq6LMIb5qsWKOQvLN16NUefLc4HswOoumRsVVaajSpQS6fobkRw==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@mnm678"
   }
  },
  "roles": {
   "root": {
    "keyids": [
     "6f260089d5923daf20166ca657c543af618346ab971884a99962b01988bbe0c3",
     "e71a54d543835ba86adad9460379c7641fb8726d164ea766801a1c522aba7ea2",
     "22f4caec6d8e6f9555af66b3d4c3cb06a3bb23fdc7e39c916c61f462e6f52b06",
     "61643838125b440b40db6942f5cb5a31c0dc04368316eb2aaa58b95904a58222",
     "a687e5bf4fab82b0ee58d46e05c9535145a2c9afb458f43d42b45ca0fdce2a70"
    ],
    "threshold": 3
   },
   "snapshot": {
    "keyids": [
     "7247f0dbad85b147e1863bade761243cc785dcb7aa410e7105dd3d2b61a36d2c"
    ],
    "threshold": 1,
    "x-tuf-on-ci-expiry-period": 3650,
    "x-tuf-on-ci-signing-period": 365
   },
   "targets": {
    "keyids": [
     "6f260089d5923daf20166ca657c543af618346ab971884a99962b01988bbe0c3",
     "e71a54d543835ba86adad9460379c7641fb8726d164ea766801a1c522aba7ea2",
     "22f4caec6d8e6f9555af66b3d4c3cb06a3bb23fdc7e39c916c61f462e6f52b06",
     "61643838125b440b40db6942f5cb5a31c0dc04368316eb2aaa58b95904a58222",
     "a687e5bf4fab82b0ee58d46e05c9535145a2c9afb458f43d42b45ca0fdce2a70"
    ],
    "threshold": 3
   },
   "timestamp": {
    "keyids": [
     "7247f0dbad85b147e1863bade761243cc785dcb7aa410e7105dd3d2b61a36d2c"
    ],
    "threshold": 1,
    "x-tuf-on-ci-expiry-period": 7,
    "x-tuf-on-ci-signing-period": 4
   }
  },
  "spec_version": "1.0",
  "version": 10,
  "x-tuf-on-ci-expiry-period": 182,
  "x-tuf-on-ci-signing-period": 31
 }
}","targets":{"trusted_root.json":"{
  "mediaType": "application/vnd.dev.sigstore.trustedroot+json;version=0.1",
  "tlogs": [
    {
      "baseUrl": "https://rekor.sigstore.dev",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE2G2Y+2tabdTV5BcGiBIx0a9fAFwrkBbmLSGtks4L3qX6yYY0zufBnhC8Ur/iy55GhWP/9A/bY2LhC30M9+RYtw==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2021-01-12T11:53:27.000Z"
        }
      },
      "logId": {
        "keyId": "wNI9atQGlz+VWfO6LRygH4QUfY/8W4RFwiT5i5WRgB0="
      }
    }
  ],
  "certificateAuthorities": [
    {
      "subject": {
        "organization": "sigstore.dev",
        "commonName": "sigstore"
      },
      "uri": "https://fulcio.sigstore.dev",
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIIB+DCCAX6gAwIBAgITNVkDZoCiofPDsy7dfm6geLbuhzAKBggqhkjOPQQDAzAqMRUwEwYDVQQKEwxzaWdzdG9yZS5kZXYxETAPBgNVBAMTCHNpZ3N0b3JlMB4XDTIxMDMwNzAzMjAyOVoXDTMxMDIyMzAzMjAyOVowKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTB2MBAGByqGSM49AgEGBSuBBAAiA2IABLSyA7Ii5k+pNO8ZEWY0ylemWDowOkNa3kL+GZE5Z5GWehL9/A9bRNA3RbrsZ5i0JcastaRL7Sp5fp/jD5dxqc/UdTVnlvS16an+2Yfswe/QuLolRUCrcOE2+2iA5+tzd6NmMGQwDgYDVR0PAQH/BAQDAgEGMBIGA1UdEwEB/wQIMAYBAf8CAQEwHQYDVR0OBBYEFMjFHQBBmiQpMlEk6w2uSu1KBtPsMB8GA1UdIwQYMBaAFMjFHQBBmiQpMlEk6w2uSu1KBtPsMAoGCCqGSM49BAMDA2gAMGUCMH8liWJfMui6vXXBhjDgY4MwslmN/TJxVe/83WrFomwmNf056y1X48F9c4m3a3ozXAIxAKjRay5/aj/jsKKGIkmQatjI8uupHr/+CxFvaJWmpYqNkLDGRU+9orzh5hI2RrcuaQ=="
          }
        ]
      },
      "validFor": {
        "start": "2021-03-07T03:20:29.000Z",
        "end": "2022-12-31T23:59:59.999Z"
      }
    },
    {
      "subject": {
        "organization": "sigstore.dev",
        "commonName": "sigstore"
      },
      "uri": "https://fulcio.sigstore.dev",
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIICGjCCAaGgAwIBAgIUALnViVfnU0brJasmRkHrn/UnfaQwCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMjA0MTMyMDA2MTVaFw0zMTEwMDUxMzU2NThaMDcxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEeMBwGA1UEAxMVc2lnc3RvcmUtaW50ZXJtZWRpYXRlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE8RVS/ysH+NOvuDZyPIZtilgUF9NlarYpAd9HP1vBBH1U5CV77LSS7s0ZiH4nE7Hv7ptS6LvvR/STk798LVgMzLlJ4HeIfF3tHSaexLcYpSASr1kS0N/RgBJz/9jWCiXno3sweTAOBgNVHQ8BAf8EBAMCAQYwEwYDVR0lBAwwCgYIKwYBBQUHAwMwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQU39Ppz1YkEZb5qNjpKFWixi4YZD8wHwYDVR0jBBgwFoAUWMAeX5FFpWapesyQoZMi0CrFxfowCgYIKoZIzj0EAwMDZwAwZAIwPCsQK4DYiZYDPIaDi5HFKnfxXx6ASSVmERfsynYBiX2X6SJRnZU84/9DZdnFvvxmAjBOt6QpBlc4J/0DxvkTCqpclvziL6BCCPnjdlIB3Pu3BxsPmygUY7Ii2zbdCdliiow="
          },
          {
            "rawBytes": "MIIB9zCCAXygAwIBAgIUALZNAPFdxHPwjeDloDwyYChAO/4wCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMTEwMDcxMzU2NTlaFw0zMTEwMDUxMzU2NThaMCoxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjERMA8GA1UEAxMIc2lnc3RvcmUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAT7XeFT4rb3PQGwS4IajtLk3/OlnpgangaBclYpsYBr5i+4ynB07ceb3LP0OIOZdxexX69c5iVuyJRQ+Hz05yi+UF3uBWAlHpiS5sh0+H2GHE7SXrk1EC5m1Tr19L9gg92jYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRYwB5fkUWlZql6zJChkyLQKsXF+jAfBgNVHSMEGDAWgBRYwB5fkUWlZql6zJChkyLQKsXF+jAKBggqhkjOPQQDAwNpADBmAjEAj1nHeXZp+13NWBNa+EDsDP8G1WWg1tCMWP/WHPqpaVo0jhsweNFZgSs0eE7wYI4qAjEA2WB9ot98sIkoF3vZYdd3/VtWB5b9TNMea7Ix/stJ5TfcLLeABLE4BNJOsQ4vnBHJ"
          }
        ]
      },
      "validFor": {
        "start": "2022-04-13T20:06:15.000Z"
      }
    }
  ],
  "ctlogs": [
    {
      "baseUrl": "https://ctfe.sigstore.dev/test",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEbfwR+RJudXscgRBRpKX1XFDy3PyudDxz/SfnRi1fT8ekpfBd2O1uoz7jr3Z8nKzxA69EUQ+eFCFI3zeubPWU7w==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2021-03-14T00:00:00.000Z",
          "end": "2022-10-31T23:59:59.999Z"
        }
      },
      "logId": {
        "keyId": "CGCS8ChS/2hF0dFrJ4ScRWcYrBY9wzjSbea8IgY2b3I="
      }
    },
    {
      "baseUrl": "https://ctfe.sigstore.dev/2022",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEiPSlFi0CmFTfEjCUqF9HuCEcYXNKAaYalIJmBZ8yyezPjTqhxrKBpMnaocVtLJBI1eM3uXnQzQGAJdJ4gs9Fyw==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2022-10-20T00:00:00.000Z"
        }
      },
      "logId": {
        "keyId": "3T0wasbHETJjGR4cmWc3AqJKXrjePK3/h4pygC8p7o4="
      }
    }
  ],
  "timestampAuthorities": [
    {
      "subject": {
        "organization": "GitHub, Inc.",
        "commonName": "Internal Services Root"
      },
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIIB3DCCAWKgAwIBAgIUchkNsH36Xa04b1LqIc+qr9DVecMwCgYIKoZIzj0EAwMwMjEVMBMGA1UEChMMR2l0SHViLCBJbmMuMRkwFwYDVQQDExBUU0EgaW50ZXJtZWRpYXRlMB4XDTIzMDQxNDAwMDAwMFoXDTI0MDQxMzAwMDAwMFowMjEVMBMGA1UEChMMR2l0SHViLCBJbmMuMRkwFwYDVQQDExBUU0EgVGltZXN0YW1waW5nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEUD5ZNbSqYMd6r8qpOOEX9ibGnZT9GsuXOhr/f8U9FJugBGExKYp40OULS0erjZW7xV9xV52NnJf5OeDq4e5ZKqNWMFQwDgYDVR0PAQH/BAQDAgeAMBMGA1UdJQQMMAoGCCsGAQUFBwMIMAwGA1UdEwEB/wQCMAAwHwYDVR0jBBgwFoAUaW1RudOgVt0leqY0WKYbuPr47wAwCgYIKoZIzj0EAwMDaAAwZQIwbUH9HvD4ejCZJOWQnqAlkqURllvu9M8+VqLbiRK+zSfZCZwsiljRn8MQQRSkXEE5AjEAg+VxqtojfVfu8DhzzhCx9GKETbJHb19iV72mMKUbDAFmzZ6bQ8b54Zb8tidy5aWe"
          },
          {
            "rawBytes": "MIICEDCCAZWgAwIBAgIUX8ZO5QXP7vN4dMQ5e9sU3nub8OgwCgYIKoZIzj0EAwMwODEVMBMGA1UEChMMR2l0SHViLCBJbmMuMR8wHQYDVQQDExZJbnRlcm5hbCBTZXJ2aWNlcyBSb290MB4XDTIzMDQxNDAwMDAwMFoXDTI4MDQxMjAwMDAwMFowMjEVMBMGA1UEChMMR2l0SHViLCBJbmMuMRkwFwYDVQQDExBUU0EgaW50ZXJtZWRpYXRlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEvMLY/dTVbvIJYANAuszEwJnQE1llftynyMKIMhh48HmqbVr5ygybzsLRLVKbBWOdZ21aeJz+gZiytZetqcyF9WlER5NEMf6JV7ZNojQpxHq4RHGoGSceQv/qvTiZxEDKo2YwZDAOBgNVHQ8BAf8EBAMCAQYwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQUaW1RudOgVt0leqY0WKYbuPr47wAwHwYDVR0jBBgwFoAU9NYYlobnAG4c0/qjxyH/lq/wz+QwCgYIKoZIzj0EAwMDaQAwZgIxAK1B185ygCrIYFlIs3GjswjnwSMG6LY8woLVdakKDZxVa8f8cqMs1DhcxJ0+09w95QIxAO+tBzZk7vjUJ9iJgD4R6ZWTxQWKqNm74jO99o+o9sv4FI/SZTZTFyMn0IJEHdNmyA=="
          },
          {
            "rawBytes": "MIIB9DCCAXqgAwIBAgIUa/JAkdUjK4JUwsqtaiRJGWhqLSowCgYIKoZIzj0EAwMwODEVMBMGA1UEChMMR2l0SHViLCBJbmMuMR8wHQYDVQQDExZJbnRlcm5hbCBTZXJ2aWNlcyBSb290MB4XDTIzMDQxNDAwMDAwMFoXDTMzMDQxMTAwMDAwMFowODEVMBMGA1UEChMMR2l0SHViLCBJbmMuMR8wHQYDVQQDExZJbnRlcm5hbCBTZXJ2aWNlcyBSb290MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEf9jFAXxz4kx68AHRMOkFBhflDcMTvzaXz4x/FCcXjJ/1qEKon/qPIGnaURskDtyNbNDOpeJTDDFqt48iMPrnzpx6IZwqemfUJN4xBEZfza+pYt/iyod+9tZr20RRWSv/o0UwQzAOBgNVHQ8BAf8EBAMCAQYwEgYDVR0TAQH/BAgwBgEB/wIBAjAdBgNVHQ4EFgQU9NYYlobnAG4c0/qjxyH/lq/wz+QwCgYIKoZIzj0EAwMDaAAwZQIxALZLZ8BgRXzKxLMMN9VIlO+e4hrBnNBgF7tz7Hnrowv2NetZErIACKFymBlvWDvtMAIwZO+ki6ssQ1bsZo98O8mEAf2NZ7iiCgDDU0Vwjeco6zyeh0zBTs9/7gV6AHNQ53xD"
          }
        ]
      },
      "validFor": {
        "start": "2023-04-14T00:00:00.000Z"
      }
    }
  ]
}
","registry.npmjs.org%2Fkeys.json":"ewogICAgImtleXMiOiBbCiAgICAgICAgewogICAgICAgICAgICAia2V5SWQiOiAiU0hBMjU2OmpsM2J3c3d1ODBQampva0NnaDBvMnc1YzJVNExoUUFFNTdnajljejFrekEiLAogICAgICAgICAgICAia2V5VXNhZ2UiOiAibnBtOnNpZ25hdHVyZXMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRTFPbGIzek1BRkZ4WEtIaUlrUU81Y0ozWWhsNWk2VVBwK0lodXRlQkpidUhjQTVVb2dLbzBFV3RsV3dXNktTYUtvVE5FWUw3SmxDUWlWbmtoQmt0VWdnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIxOTk5LTAxLTAxVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgIH0KICAgICAgICB9LAogICAgICAgIHsKICAgICAgICAgICAgImtleUlkIjogIlNIQTI1NjpqbDNid3N3dTgwUGpqb2tDZ2gwbzJ3NWMyVTRMaFFBRTU3Z2o5Y3oxa3pBIiwKICAgICAgICAgICAgImtleVVzYWdlIjogIm5wbTphdHRlc3RhdGlvbnMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRTFPbGIzek1BRkZ4WEtIaUlrUU81Y0ozWWhsNWk2VVBwK0lodXRlQkpidUhjQTVVb2dLbzBFV3RsV3dXNktTYUtvVE5FWUw3SmxDUWlWbmtoQmt0VWdnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIyMDIyLTEyLTAxVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgIH0KICAgICAgICB9CiAgICBdCn0K"}}} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/dsse.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/dsse.js deleted file mode 100644 index 1033fc422aba0..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/dsse.js +++ /dev/null @@ -1,43 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.DSSESignatureContent = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const core_1 = require("@sigstore/core"); -class DSSESignatureContent { - constructor(env) { - this.env = env; - } - compareDigest(digest) { - return core_1.crypto.bufferEqual(digest, core_1.crypto.digest('sha256', this.env.payload)); - } - compareSignature(signature) { - return core_1.crypto.bufferEqual(signature, this.signature); - } - verifySignature(key) { - return core_1.crypto.verify(this.preAuthEncoding, key, this.signature); - } - get signature() { - return this.env.signatures.length > 0 - ? 
this.env.signatures[0].sig - : Buffer.from(''); - } - // DSSE Pre-Authentication Encoding - get preAuthEncoding() { - return core_1.dsse.preAuthEncoding(this.env.payloadType, this.env.payload); - } -} -exports.DSSESignatureContent = DSSESignatureContent; diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/index.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/index.js deleted file mode 100644 index 4287d8032b75f..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/index.js +++ /dev/null @@ -1,57 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.toSignedEntity = toSignedEntity; -exports.signatureContent = signatureContent; -const core_1 = require("@sigstore/core"); -const dsse_1 = require("./dsse"); -const message_1 = require("./message"); -function toSignedEntity(bundle, artifact) { - const { tlogEntries, timestampVerificationData } = bundle.verificationMaterial; - const timestamps = []; - for (const entry of tlogEntries) { - timestamps.push({ - $case: 'transparency-log', - tlogEntry: entry, - }); - } - for (const ts of timestampVerificationData?.rfc3161Timestamps ?? []) { - timestamps.push({ - $case: 'timestamp-authority', - timestamp: core_1.RFC3161Timestamp.parse(ts.signedTimestamp), - }); - } - return { - signature: signatureContent(bundle, artifact), - key: key(bundle), - tlogEntries, - timestamps, - }; -} -function signatureContent(bundle, artifact) { - switch (bundle.content.$case) { - case 'dsseEnvelope': - return new dsse_1.DSSESignatureContent(bundle.content.dsseEnvelope); - case 'messageSignature': - return new message_1.MessageSignatureContent(bundle.content.messageSignature, artifact); - } -} -function key(bundle) { - switch (bundle.verificationMaterial.content.$case) { - case 'publicKey': - return { - $case: 'public-key', - hint: bundle.verificationMaterial.content.publicKey.hint, - }; - case 'x509CertificateChain': - return { - $case: 'certificate', - certificate: core_1.X509Certificate.parse(bundle.verificationMaterial.content.x509CertificateChain - .certificates[0].rawBytes), - }; - case 'certificate': - return { - $case: 'certificate', - certificate: core_1.X509Certificate.parse(bundle.verificationMaterial.content.certificate.rawBytes), - }; - } -} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/message.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/message.js deleted file mode 100644 index 836148c68a8b6..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/message.js +++ /dev/null @@ -1,36 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.MessageSignatureContent = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -const core_1 = require("@sigstore/core"); -class MessageSignatureContent { - constructor(messageSignature, artifact) { - this.signature = messageSignature.signature; - this.messageDigest = messageSignature.messageDigest.digest; - this.artifact = artifact; - } - compareSignature(signature) { - return core_1.crypto.bufferEqual(signature, this.signature); - } - compareDigest(digest) { - return core_1.crypto.bufferEqual(digest, this.messageDigest); - } - verifySignature(key) { - return core_1.crypto.verify(this.artifact, key, this.signature); - } -} -exports.MessageSignatureContent = MessageSignatureContent; diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/error.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/error.js deleted file mode 100644 index 6cb1cd4121343..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/verify/dist/error.js +++ /dev/null @@ -1,32 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.PolicyError = exports.VerificationError = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -class BaseError extends Error { - constructor({ code, message, cause, }) { - super(message); - this.code = code; - this.cause = cause; - this.name = this.constructor.name; - } -} -class VerificationError extends BaseError { -} -exports.VerificationError = VerificationError; -class PolicyError extends BaseError { -} -exports.PolicyError = PolicyError; diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/index.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/index.js deleted file mode 100644 index 3222876fcd68b..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/verify/dist/index.js +++ /dev/null @@ -1,28 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Verifier = exports.toTrustMaterial = exports.VerificationError = exports.PolicyError = exports.toSignedEntity = void 0; -/* istanbul ignore file */ -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -var bundle_1 = require("./bundle"); -Object.defineProperty(exports, "toSignedEntity", { enumerable: true, get: function () { return bundle_1.toSignedEntity; } }); -var error_1 = require("./error"); -Object.defineProperty(exports, "PolicyError", { enumerable: true, get: function () { return error_1.PolicyError; } }); -Object.defineProperty(exports, "VerificationError", { enumerable: true, get: function () { return error_1.VerificationError; } }); -var trust_1 = require("./trust"); -Object.defineProperty(exports, "toTrustMaterial", { enumerable: true, get: function () { return trust_1.toTrustMaterial; } }); -var verifier_1 = require("./verifier"); -Object.defineProperty(exports, "Verifier", { enumerable: true, get: function () { return verifier_1.Verifier; } }); diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/key/certificate.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/key/certificate.js deleted file mode 100644 index a916de0e51e71..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/verify/dist/key/certificate.js +++ /dev/null @@ -1,205 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.CertificateChainVerifier = void 0; -exports.verifyCertificateChain = verifyCertificateChain; -const error_1 = require("../error"); -const trust_1 = require("../trust"); -function verifyCertificateChain(leaf, certificateAuthorities) { - // Filter list of trusted CAs to those which are valid for the given - // leaf certificate. - const cas = (0, trust_1.filterCertAuthorities)(certificateAuthorities, { - start: leaf.notBefore, - end: leaf.notAfter, - }); - /* eslint-disable-next-line @typescript-eslint/no-explicit-any */ - let error; - for (const ca of cas) { - try { - const verifier = new CertificateChainVerifier({ - trustedCerts: ca.certChain, - untrustedCert: leaf, - }); - return verifier.verify(); - } - catch (err) { - error = err; - } - } - // If we failed to verify the certificate chain for all of the trusted - // CAs, throw the last error we encountered. - throw new error_1.VerificationError({ - code: 'CERTIFICATE_ERROR', - message: 'Failed to verify certificate chain', - cause: error, - }); -} -class CertificateChainVerifier { - constructor(opts) { - this.untrustedCert = opts.untrustedCert; - this.trustedCerts = opts.trustedCerts; - this.localCerts = dedupeCertificates([ - ...opts.trustedCerts, - opts.untrustedCert, - ]); - } - verify() { - // Construct certificate path from leaf to root - const certificatePath = this.sort(); - // Perform validation checks on each certificate in the path - this.checkPath(certificatePath); - // Return verified certificate path - return certificatePath; - } - sort() { - const leafCert = this.untrustedCert; - // Construct all possible paths from the leaf - let paths = this.buildPaths(leafCert); - // Filter for paths which contain a trusted certificate - paths = paths.filter((path) => path.some((cert) => this.trustedCerts.includes(cert))); - if (paths.length === 0) { - throw new error_1.VerificationError({ - code: 'CERTIFICATE_ERROR', - message: 'no trusted certificate path found', - }); - } - // Find the shortest of possible paths - /* istanbul ignore next */ - const path = paths.reduce((prev, curr) => prev.length < curr.length ? prev : curr); - // Construct chain from shortest path - // Removes the last certificate in the path, which will be a second copy - // of the root certificate given that the root is self-signed. 
- return [leafCert, ...path].slice(0, -1); - } - // Recursively build all possible paths from the leaf to the root - buildPaths(certificate) { - const paths = []; - const issuers = this.findIssuer(certificate); - if (issuers.length === 0) { - throw new error_1.VerificationError({ - code: 'CERTIFICATE_ERROR', - message: 'no valid certificate path found', - }); - } - for (let i = 0; i < issuers.length; i++) { - const issuer = issuers[i]; - // Base case - issuer is self - if (issuer.equals(certificate)) { - paths.push([certificate]); - continue; - } - // Recursively build path for the issuer - const subPaths = this.buildPaths(issuer); - // Construct paths by appending the issuer to each subpath - for (let j = 0; j < subPaths.length; j++) { - paths.push([issuer, ...subPaths[j]]); - } - } - return paths; - } - // Return all possible issuers for the given certificate - findIssuer(certificate) { - let issuers = []; - let keyIdentifier; - // Exit early if the certificate is self-signed - if (certificate.subject.equals(certificate.issuer)) { - if (certificate.verify()) { - return [certificate]; - } - } - // If the certificate has an authority key identifier, use that - // to find the issuer - if (certificate.extAuthorityKeyID) { - keyIdentifier = certificate.extAuthorityKeyID.keyIdentifier; - // TODO: Add support for authorityCertIssuer/authorityCertSerialNumber - // though Fulcio doesn't appear to use these - } - // Find possible issuers by comparing the authorityKeyID/subjectKeyID - // or issuer/subject. Potential issuers are added to the result array. - this.localCerts.forEach((possibleIssuer) => { - if (keyIdentifier) { - if (possibleIssuer.extSubjectKeyID) { - if (possibleIssuer.extSubjectKeyID.keyIdentifier.equals(keyIdentifier)) { - issuers.push(possibleIssuer); - } - return; - } - } - // Fallback to comparing certificate issuer and subject if - // subjectKey/authorityKey extensions are not present - if (possibleIssuer.subject.equals(certificate.issuer)) { - issuers.push(possibleIssuer); - } - }); - // Remove any issuers which fail to verify the certificate - issuers = issuers.filter((issuer) => { - try { - return certificate.verify(issuer); - } - catch (ex) { - /* istanbul ignore next - should never error */ - return false; - } - }); - return issuers; - } - checkPath(path) { - /* istanbul ignore if */ - if (path.length < 1) { - throw new error_1.VerificationError({ - code: 'CERTIFICATE_ERROR', - message: 'certificate chain must contain at least one certificate', - }); - } - // Ensure that all certificates beyond the leaf are CAs - const validCAs = path.slice(1).every((cert) => cert.isCA); - if (!validCAs) { - throw new error_1.VerificationError({ - code: 'CERTIFICATE_ERROR', - message: 'intermediate certificate is not a CA', - }); - } - // Certificate's issuer must match the subject of the next certificate - // in the chain - for (let i = path.length - 2; i >= 0; i--) { - /* istanbul ignore if */ - if (!path[i].issuer.equals(path[i + 1].subject)) { - throw new error_1.VerificationError({ - code: 'CERTIFICATE_ERROR', - message: 'incorrect certificate name chaining', - }); - } - } - // Check pathlength constraints - for (let i = 0; i < path.length; i++) { - const cert = path[i]; - // If the certificate is a CA, check the path length - if (cert.extBasicConstraints?.isCA) { - const pathLength = cert.extBasicConstraints.pathLenConstraint; - // The path length, if set, indicates how many intermediate - // certificates (NOT including the leaf) are allowed to follow. 
The - // pathLength constraint of any intermediate CA certificate MUST be - // greater than or equal to it's own depth in the chain (with an - // adjustment for the leaf certificate) - if (pathLength !== undefined && pathLength < i - 1) { - throw new error_1.VerificationError({ - code: 'CERTIFICATE_ERROR', - message: 'path length constraint exceeded', - }); - } - } - } - } -} -exports.CertificateChainVerifier = CertificateChainVerifier; -// Remove duplicate certificates from the array -function dedupeCertificates(certs) { - for (let i = 0; i < certs.length; i++) { - for (let j = i + 1; j < certs.length; j++) { - if (certs[i].equals(certs[j])) { - certs.splice(j, 1); - j--; - } - } - } - return certs; -} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/key/index.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/key/index.js deleted file mode 100644 index cc894aab95a5d..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/verify/dist/key/index.js +++ /dev/null @@ -1,72 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.verifyPublicKey = verifyPublicKey; -exports.verifyCertificate = verifyCertificate; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const core_1 = require("@sigstore/core"); -const error_1 = require("../error"); -const certificate_1 = require("./certificate"); -const sct_1 = require("./sct"); -const OID_FULCIO_ISSUER_V1 = '1.3.6.1.4.1.57264.1.1'; -const OID_FULCIO_ISSUER_V2 = '1.3.6.1.4.1.57264.1.8'; -function verifyPublicKey(hint, timestamps, trustMaterial) { - const key = trustMaterial.publicKey(hint); - timestamps.forEach((timestamp) => { - if (!key.validFor(timestamp)) { - throw new error_1.VerificationError({ - code: 'PUBLIC_KEY_ERROR', - message: `Public key is not valid for timestamp: ${timestamp.toISOString()}`, - }); - } - }); - return { key: key.publicKey }; -} -function verifyCertificate(leaf, timestamps, trustMaterial) { - // Check that leaf certificate chains to a trusted CA - const path = (0, certificate_1.verifyCertificateChain)(leaf, trustMaterial.certificateAuthorities); - // Check that ALL certificates are valid for ALL of the timestamps - const validForDate = timestamps.every((timestamp) => path.every((cert) => cert.validForDate(timestamp))); - if (!validForDate) { - throw new error_1.VerificationError({ - code: 'CERTIFICATE_ERROR', - message: 'certificate is not valid or expired at the specified date', - }); - } - return { - scts: (0, sct_1.verifySCTs)(path[0], path[1], trustMaterial.ctlogs), - signer: getSigner(path[0]), - }; -} -function getSigner(cert) { - let issuer; - const issuerExtension = cert.extension(OID_FULCIO_ISSUER_V2); - /* istanbul ignore next */ - if (issuerExtension) { - issuer = issuerExtension.valueObj.subs?.[0]?.value.toString('ascii'); - } - else { - issuer = cert.extension(OID_FULCIO_ISSUER_V1)?.value.toString('ascii'); - } - const identity = { - extensions: { issuer }, - subjectAlternativeName: cert.subjectAltName, - }; - return 
{ - key: core_1.crypto.createPublicKey(cert.publicKey), - identity, - }; -} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/key/sct.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/key/sct.js deleted file mode 100644 index 8eca48738096e..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/verify/dist/key/sct.js +++ /dev/null @@ -1,78 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.verifySCTs = verifySCTs; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const core_1 = require("@sigstore/core"); -const error_1 = require("../error"); -const trust_1 = require("../trust"); -function verifySCTs(cert, issuer, ctlogs) { - let extSCT; - // Verifying the SCT requires that we remove the SCT extension and - // re-encode the TBS structure to DER -- this value is part of the data - // over which the signature is calculated. Since this is a destructive action - // we create a copy of the certificate so we can remove the SCT extension - // without affecting the original certificate. - const clone = cert.clone(); - // Intentionally not using the findExtension method here because we want to - // remove the the SCT extension from the certificate before calculating the - // PreCert structure - for (let i = 0; i < clone.extensions.length; i++) { - const ext = clone.extensions[i]; - if (ext.subs[0].toOID() === core_1.EXTENSION_OID_SCT) { - extSCT = new core_1.X509SCTExtension(ext); - // Remove the extension from the certificate - clone.extensions.splice(i, 1); - break; - } - } - // No SCT extension found to verify - if (!extSCT) { - return []; - } - // Found an SCT extension but it has no SCTs - /* istanbul ignore if -- too difficult to fabricate test case for this */ - if (extSCT.signedCertificateTimestamps.length === 0) { - return []; - } - // Construct the PreCert structure - // https://www.rfc-editor.org/rfc/rfc6962#section-3.2 - const preCert = new core_1.ByteStream(); - // Calculate hash of the issuer's public key - const issuerId = core_1.crypto.digest('sha256', issuer.publicKey); - preCert.appendView(issuerId); - // Re-encodes the certificate to DER after removing the SCT extension - const tbs = clone.tbsCertificate.toDER(); - preCert.appendUint24(tbs.length); - preCert.appendView(tbs); - // Calculate and return the verification results for each SCT - return extSCT.signedCertificateTimestamps.map((sct) => { - // Find the ctlog instance that corresponds to the SCT's logID - const validCTLogs = (0, trust_1.filterTLogAuthorities)(ctlogs, { - logID: sct.logID, - targetDate: sct.datetime, - }); - // See if the SCT is valid for any of the CT logs - const verified = validCTLogs.some((log) => sct.verify(preCert.buffer, log.publicKey)); - if (!verified) { - throw new error_1.VerificationError({ - code: 'CERTIFICATE_ERROR', - message: 'SCT verification failed', - }); - } - return sct.logID; - }); -} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/policy.js 
b/node_modules/pacote/node_modules/@sigstore/verify/dist/policy.js deleted file mode 100644 index f5960cf047b84..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/verify/dist/policy.js +++ /dev/null @@ -1,24 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.verifySubjectAlternativeName = verifySubjectAlternativeName; -exports.verifyExtensions = verifyExtensions; -const error_1 = require("./error"); -function verifySubjectAlternativeName(policyIdentity, signerIdentity) { - if (signerIdentity === undefined || !signerIdentity.match(policyIdentity)) { - throw new error_1.PolicyError({ - code: 'UNTRUSTED_SIGNER_ERROR', - message: `certificate identity error - expected ${policyIdentity}, got ${signerIdentity}`, - }); - } -} -function verifyExtensions(policyExtensions, signerExtensions = {}) { - let key; - for (key in policyExtensions) { - if (signerExtensions[key] !== policyExtensions[key]) { - throw new error_1.PolicyError({ - code: 'UNTRUSTED_SIGNER_ERROR', - message: `invalid certificate extension - expected ${key}=${policyExtensions[key]}, got ${key}=${signerExtensions[key]}`, - }); - } - } -} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/shared.types.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/shared.types.js deleted file mode 100644 index c8ad2e549bdc6..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/verify/dist/shared.types.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js deleted file mode 100644 index 46619b675f886..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js +++ /dev/null @@ -1,157 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.verifyCheckpoint = verifyCheckpoint; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const core_1 = require("@sigstore/core"); -const error_1 = require("../error"); -const trust_1 = require("../trust"); -// Separator between the note and the signatures in a checkpoint -const CHECKPOINT_SEPARATOR = '\n\n'; -// Checkpoint signatures are of the following form: -// "– \n" -// where: -// - the prefix is an emdash (U+2014). -// - gives a human-readable representation of the signing ID. -// - is the first 4 bytes of the SHA256 hash of the -// associated public key followed by the signature bytes. -const SIGNATURE_REGEX = /\u2014 (\S+) (\S+)\n/g; -// Verifies the checkpoint value in the given tlog entry. There are two steps -// to the verification: -// 1. Verify that all signatures in the checkpoint can be verified against a -// trusted public key -// 2. 
Verify that the root hash in the checkpoint matches the root hash in the -// inclusion proof -// See: https://github.com/transparency-dev/formats/blob/main/log/README.md -function verifyCheckpoint(entry, tlogs) { - // Filter tlog instances to just those which were valid at the time of the - // entry - const validTLogs = (0, trust_1.filterTLogAuthorities)(tlogs, { - targetDate: new Date(Number(entry.integratedTime) * 1000), - }); - const inclusionProof = entry.inclusionProof; - const signedNote = SignedNote.fromString(inclusionProof.checkpoint.envelope); - const checkpoint = LogCheckpoint.fromString(signedNote.note); - // Verify that the signatures in the checkpoint are all valid - if (!verifySignedNote(signedNote, validTLogs)) { - throw new error_1.VerificationError({ - code: 'TLOG_INCLUSION_PROOF_ERROR', - message: 'invalid checkpoint signature', - }); - } - // Verify that the root hash from the checkpoint matches the root hash in the - // inclusion proof - if (!core_1.crypto.bufferEqual(checkpoint.logHash, inclusionProof.rootHash)) { - throw new error_1.VerificationError({ - code: 'TLOG_INCLUSION_PROOF_ERROR', - message: 'root hash mismatch', - }); - } -} -// Verifies the signatures in the SignedNote. For each signature, the -// corresponding transparency log is looked up by the key hint and the -// signature is verified against the public key in the transparency log. -// Throws an error if any of the signatures are invalid. -function verifySignedNote(signedNote, tlogs) { - const data = Buffer.from(signedNote.note, 'utf-8'); - return signedNote.signatures.every((signature) => { - // Find the transparency log instance with the matching key hint - const tlog = tlogs.find((tlog) => core_1.crypto.bufferEqual(tlog.logID.subarray(0, 4), signature.keyHint)); - if (!tlog) { - return false; - } - return core_1.crypto.verify(data, tlog.publicKey, signature.signature); - }); -} -// SignedNote represents a signed note from a transparency log checkpoint. Consists -// of a body (or note) and one more signatures calculated over the body. See -// https://github.com/transparency-dev/formats/blob/main/log/README.md#signed-envelope -class SignedNote { - constructor(note, signatures) { - this.note = note; - this.signatures = signatures; - } - // Deserialize a SignedNote from a string - static fromString(envelope) { - if (!envelope.includes(CHECKPOINT_SEPARATOR)) { - throw new error_1.VerificationError({ - code: 'TLOG_INCLUSION_PROOF_ERROR', - message: 'missing checkpoint separator', - }); - } - // Split the note into the header and the data portions at the separator - const split = envelope.indexOf(CHECKPOINT_SEPARATOR); - const header = envelope.slice(0, split + 1); - const data = envelope.slice(split + CHECKPOINT_SEPARATOR.length); - // Find all the signature lines in the data portion - const matches = data.matchAll(SIGNATURE_REGEX); - // Parse each of the matched signature lines into the name and signature. - // The first four bytes of the signature are the key hint (should match the - // first four bytes of the log ID), and the rest is the signature itself. 
- const signatures = Array.from(matches, (match) => { - const [, name, signature] = match; - const sigBytes = Buffer.from(signature, 'base64'); - if (sigBytes.length < 5) { - throw new error_1.VerificationError({ - code: 'TLOG_INCLUSION_PROOF_ERROR', - message: 'malformed checkpoint signature', - }); - } - return { - name, - keyHint: sigBytes.subarray(0, 4), - signature: sigBytes.subarray(4), - }; - }); - if (signatures.length === 0) { - throw new error_1.VerificationError({ - code: 'TLOG_INCLUSION_PROOF_ERROR', - message: 'no signatures found in checkpoint', - }); - } - return new SignedNote(header, signatures); - } -} -// LogCheckpoint represents a transparency log checkpoint. Consists of the -// following: -// - origin: the name of the transparency log -// - logSize: the size of the log at the time of the checkpoint -// - logHash: the root hash of the log at the time of the checkpoint -// - rest: the rest of the checkpoint body, which is a list of log entries -// See: -// https://github.com/transparency-dev/formats/blob/main/log/README.md#checkpoint-body -class LogCheckpoint { - constructor(origin, logSize, logHash, rest) { - this.origin = origin; - this.logSize = logSize; - this.logHash = logHash; - this.rest = rest; - } - static fromString(note) { - const lines = note.trimEnd().split('\n'); - if (lines.length < 3) { - throw new error_1.VerificationError({ - code: 'TLOG_INCLUSION_PROOF_ERROR', - message: 'too few lines in checkpoint header', - }); - } - const origin = lines[0]; - const logSize = BigInt(lines[1]); - const rootHash = Buffer.from(lines[2], 'base64'); - const rest = lines.slice(3); - return new LogCheckpoint(origin, logSize, rootHash, rest); - } -} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/index.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/index.js deleted file mode 100644 index 56e948de19338..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/index.js +++ /dev/null @@ -1,46 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.verifyTSATimestamp = verifyTSATimestamp; -exports.verifyTLogTimestamp = verifyTLogTimestamp; -const error_1 = require("../error"); -const checkpoint_1 = require("./checkpoint"); -const merkle_1 = require("./merkle"); -const set_1 = require("./set"); -const tsa_1 = require("./tsa"); -function verifyTSATimestamp(timestamp, data, timestampAuthorities) { - (0, tsa_1.verifyRFC3161Timestamp)(timestamp, data, timestampAuthorities); - return { - type: 'timestamp-authority', - logID: timestamp.signerSerialNumber, - timestamp: timestamp.signingTime, - }; -} -function verifyTLogTimestamp(entry, tlogAuthorities) { - let inclusionVerified = false; - if (isTLogEntryWithInclusionPromise(entry)) { - (0, set_1.verifyTLogSET)(entry, tlogAuthorities); - inclusionVerified = true; - } - if (isTLogEntryWithInclusionProof(entry)) { - (0, merkle_1.verifyMerkleInclusion)(entry); - (0, checkpoint_1.verifyCheckpoint)(entry, tlogAuthorities); - inclusionVerified = true; - } - if (!inclusionVerified) { - throw new error_1.VerificationError({ - code: 'TLOG_MISSING_INCLUSION_ERROR', - message: 'inclusion could not be verified', - }); - } - return { - type: 'transparency-log', - logID: entry.logId.keyId, - timestamp: new Date(Number(entry.integratedTime) * 1000), - }; -} -function isTLogEntryWithInclusionPromise(entry) { - return entry.inclusionPromise !== undefined; -} -function isTLogEntryWithInclusionProof(entry) { - return 
entry.inclusionProof !== undefined; -} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/merkle.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/merkle.js deleted file mode 100644 index f57cae42002bd..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/merkle.js +++ /dev/null @@ -1,104 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.verifyMerkleInclusion = verifyMerkleInclusion; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const core_1 = require("@sigstore/core"); -const error_1 = require("../error"); -const RFC6962_LEAF_HASH_PREFIX = Buffer.from([0x00]); -const RFC6962_NODE_HASH_PREFIX = Buffer.from([0x01]); -function verifyMerkleInclusion(entry) { - const inclusionProof = entry.inclusionProof; - const logIndex = BigInt(inclusionProof.logIndex); - const treeSize = BigInt(inclusionProof.treeSize); - if (logIndex < 0n || logIndex >= treeSize) { - throw new error_1.VerificationError({ - code: 'TLOG_INCLUSION_PROOF_ERROR', - message: `invalid index: ${logIndex}`, - }); - } - // Figure out which subset of hashes corresponds to the inner and border - // nodes - const { inner, border } = decompInclProof(logIndex, treeSize); - if (inclusionProof.hashes.length !== inner + border) { - throw new error_1.VerificationError({ - code: 'TLOG_INCLUSION_PROOF_ERROR', - message: 'invalid hash count', - }); - } - const innerHashes = inclusionProof.hashes.slice(0, inner); - const borderHashes = inclusionProof.hashes.slice(inner); - // The entry's hash is the leaf hash - const leafHash = hashLeaf(entry.canonicalizedBody); - // Chain the hashes belonging to the inner and border portions - const calculatedHash = chainBorderRight(chainInner(leafHash, innerHashes, logIndex), borderHashes); - // Calculated hash should match the root hash in the inclusion proof - if (!core_1.crypto.bufferEqual(calculatedHash, inclusionProof.rootHash)) { - throw new error_1.VerificationError({ - code: 'TLOG_INCLUSION_PROOF_ERROR', - message: 'calculated root hash does not match inclusion proof', - }); - } -} -// Breaks down inclusion proof for a leaf at the specified index in a tree of -// the specified size. The split point is where paths to the index leaf and -// the (size - 1) leaf diverge. Returns lengths of the bottom and upper proof -// parts. -function decompInclProof(index, size) { - const inner = innerProofSize(index, size); - const border = onesCount(index >> BigInt(inner)); - return { inner, border }; -} -// Computes a subtree hash for a node on or below the tree's right border. -// Assumes the provided proof hashes are ordered from lower to higher levels -// and seed is the initial hash of the node specified by the index. 
-function chainInner(seed, hashes, index) { - return hashes.reduce((acc, h, i) => { - if ((index >> BigInt(i)) & BigInt(1)) { - return hashChildren(h, acc); - } - else { - return hashChildren(acc, h); - } - }, seed); -} -// Computes a subtree hash for nodes along the tree's right border. -function chainBorderRight(seed, hashes) { - return hashes.reduce((acc, h) => hashChildren(h, acc), seed); -} -function innerProofSize(index, size) { - return bitLength(index ^ (size - BigInt(1))); -} -// Counts the number of ones in the binary representation of the given number. -// https://en.wikipedia.org/wiki/Hamming_weight -function onesCount(num) { - return num.toString(2).split('1').length - 1; -} -// Returns the number of bits necessary to represent an integer in binary. -function bitLength(n) { - if (n === 0n) { - return 0; - } - return n.toString(2).length; -} -// Hashing logic according to RFC6962. -// https://datatracker.ietf.org/doc/html/rfc6962#section-2 -function hashChildren(left, right) { - return core_1.crypto.digest('sha256', RFC6962_NODE_HASH_PREFIX, left, right); -} -function hashLeaf(leaf) { - return core_1.crypto.digest('sha256', RFC6962_LEAF_HASH_PREFIX, leaf); -} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/set.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/set.js deleted file mode 100644 index 5d3f47bb88746..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/set.js +++ /dev/null @@ -1,60 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.verifyTLogSET = verifyTLogSET; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const core_1 = require("@sigstore/core"); -const error_1 = require("../error"); -const trust_1 = require("../trust"); -// Verifies the SET for the given entry against the list of trusted -// transparency logs. Returns true if the SET can be verified against at least -// one of the trusted logs; otherwise, returns false. 
-function verifyTLogSET(entry, tlogs) { - // Filter the list of tlog instances to only those which might be able to - // verify the SET - const validTLogs = (0, trust_1.filterTLogAuthorities)(tlogs, { - logID: entry.logId.keyId, - targetDate: new Date(Number(entry.integratedTime) * 1000), - }); - // Check to see if we can verify the SET against any of the valid tlogs - const verified = validTLogs.some((tlog) => { - // Re-create the original Rekor verification payload - const payload = toVerificationPayload(entry); - // Canonicalize the payload and turn into a buffer for verification - const data = Buffer.from(core_1.json.canonicalize(payload), 'utf8'); - // Extract the SET from the tlog entry - const signature = entry.inclusionPromise.signedEntryTimestamp; - return core_1.crypto.verify(data, tlog.publicKey, signature); - }); - if (!verified) { - throw new error_1.VerificationError({ - code: 'TLOG_INCLUSION_PROMISE_ERROR', - message: 'inclusion promise could not be verified', - }); - } -} -// Returns a properly formatted "VerificationPayload" for one of the -// transaction log entires in the given bundle which can be used for SET -// verification. -function toVerificationPayload(entry) { - const { integratedTime, logIndex, logId, canonicalizedBody } = entry; - return { - body: canonicalizedBody.toString('base64'), - integratedTime: Number(integratedTime), - logIndex: Number(logIndex), - logID: logId.keyId.toString('hex'), - }; -} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/tsa.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/tsa.js deleted file mode 100644 index 70388cd06c52d..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/tsa.js +++ /dev/null @@ -1,73 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.verifyRFC3161Timestamp = verifyRFC3161Timestamp; -const core_1 = require("@sigstore/core"); -const error_1 = require("../error"); -const certificate_1 = require("../key/certificate"); -const trust_1 = require("../trust"); -function verifyRFC3161Timestamp(timestamp, data, timestampAuthorities) { - const signingTime = timestamp.signingTime; - // Filter for CAs which were valid at the time of signing - timestampAuthorities = (0, trust_1.filterCertAuthorities)(timestampAuthorities, { - start: signingTime, - end: signingTime, - }); - // Filter for CAs which match serial and issuer embedded in the timestamp - timestampAuthorities = filterCAsBySerialAndIssuer(timestampAuthorities, { - serialNumber: timestamp.signerSerialNumber, - issuer: timestamp.signerIssuer, - }); - // Check that we can verify the timestamp with AT LEAST ONE of the remaining - // CAs - const verified = timestampAuthorities.some((ca) => { - try { - verifyTimestampForCA(timestamp, data, ca); - return true; - } - catch (e) { - return false; - } - }); - if (!verified) { - throw new error_1.VerificationError({ - code: 'TIMESTAMP_ERROR', - message: 'timestamp could not be verified', - }); - } -} -function verifyTimestampForCA(timestamp, data, ca) { - const [leaf, ...cas] = ca.certChain; - const signingKey = core_1.crypto.createPublicKey(leaf.publicKey); - const signingTime = timestamp.signingTime; - // Verify the certificate chain for the provided CA - try { - new certificate_1.CertificateChainVerifier({ - untrustedCert: leaf, - trustedCerts: cas, - }).verify(); - } - catch (e) { - throw new error_1.VerificationError({ - code: 'TIMESTAMP_ERROR', - message: 'invalid certificate chain', - }); - } - // 
Check that all of the CA certs were valid at the time of signing - const validAtSigningTime = ca.certChain.every((cert) => cert.validForDate(signingTime)); - if (!validAtSigningTime) { - throw new error_1.VerificationError({ - code: 'TIMESTAMP_ERROR', - message: 'timestamp was signed with an expired certificate', - }); - } - // Check that the signing certificate's key can be used to verify the - // timestamp signature. - timestamp.verify(data, signingKey); -} -// Filters the list of CAs to those which have a leaf signing certificate which -// matches the given serial number and issuer. -function filterCAsBySerialAndIssuer(timestampAuthorities, criteria) { - return timestampAuthorities.filter((ca) => ca.certChain.length > 0 && - core_1.crypto.bufferEqual(ca.certChain[0].serialNumber, criteria.serialNumber) && - core_1.crypto.bufferEqual(ca.certChain[0].issuer, criteria.issuer)); -} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/dsse.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/dsse.js deleted file mode 100644 index d71ed8c6e7ad9..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/dsse.js +++ /dev/null @@ -1,57 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.verifyDSSETLogBody = verifyDSSETLogBody; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const error_1 = require("../error"); -// Compare the given intoto tlog entry to the given bundle -function verifyDSSETLogBody(tlogEntry, content) { - switch (tlogEntry.apiVersion) { - case '0.0.1': - return verifyDSSE001TLogBody(tlogEntry, content); - default: - throw new error_1.VerificationError({ - code: 'TLOG_BODY_ERROR', - message: `unsupported dsse version: ${tlogEntry.apiVersion}`, - }); - } -} -// Compare the given dsse v0.0.1 tlog entry to the given DSSE envelope. 
-function verifyDSSE001TLogBody(tlogEntry, content) { - // Ensure the bundle's DSSE only contains a single signature - if (tlogEntry.spec.signatures?.length !== 1) { - throw new error_1.VerificationError({ - code: 'TLOG_BODY_ERROR', - message: 'signature count mismatch', - }); - } - const tlogSig = tlogEntry.spec.signatures[0].signature; - // Ensure that the signature in the bundle's DSSE matches tlog entry - if (!content.compareSignature(Buffer.from(tlogSig, 'base64'))) - throw new error_1.VerificationError({ - code: 'TLOG_BODY_ERROR', - message: 'tlog entry signature mismatch', - }); - // Ensure the digest of the bundle's DSSE payload matches the digest in the - // tlog entry - const tlogHash = tlogEntry.spec.payloadHash?.value || ''; - if (!content.compareDigest(Buffer.from(tlogHash, 'hex'))) { - throw new error_1.VerificationError({ - code: 'TLOG_BODY_ERROR', - message: 'DSSE payload hash mismatch', - }); - } -} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js deleted file mode 100644 index c4aa345b57ba7..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js +++ /dev/null @@ -1,51 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.verifyHashedRekordTLogBody = verifyHashedRekordTLogBody; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -const error_1 = require("../error"); -// Compare the given hashedrekord tlog entry to the given bundle -function verifyHashedRekordTLogBody(tlogEntry, content) { - switch (tlogEntry.apiVersion) { - case '0.0.1': - return verifyHashedrekord001TLogBody(tlogEntry, content); - default: - throw new error_1.VerificationError({ - code: 'TLOG_BODY_ERROR', - message: `unsupported hashedrekord version: ${tlogEntry.apiVersion}`, - }); - } -} -// Compare the given hashedrekord v0.0.1 tlog entry to the given message -// signature -function verifyHashedrekord001TLogBody(tlogEntry, content) { - // Ensure that the bundles message signature matches the tlog entry - const tlogSig = tlogEntry.spec.signature.content || ''; - if (!content.compareSignature(Buffer.from(tlogSig, 'base64'))) { - throw new error_1.VerificationError({ - code: 'TLOG_BODY_ERROR', - message: 'signature mismatch', - }); - } - // Ensure that the bundle's message digest matches the tlog entry - const tlogDigest = tlogEntry.spec.data.hash?.value || ''; - if (!content.compareDigest(Buffer.from(tlogDigest, 'hex'))) { - throw new error_1.VerificationError({ - code: 'TLOG_BODY_ERROR', - message: 'digest mismatch', - }); - } -} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/index.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/index.js deleted file mode 100644 index da235360c594a..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/index.js +++ /dev/null @@ -1,47 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.verifyTLogBody = verifyTLogBody; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const error_1 = require("../error"); -const dsse_1 = require("./dsse"); -const hashedrekord_1 = require("./hashedrekord"); -const intoto_1 = require("./intoto"); -// Verifies that the given tlog entry matches the supplied signature content. 
-function verifyTLogBody(entry, sigContent) { - const { kind, version } = entry.kindVersion; - const body = JSON.parse(entry.canonicalizedBody.toString('utf8')); - if (kind !== body.kind || version !== body.apiVersion) { - throw new error_1.VerificationError({ - code: 'TLOG_BODY_ERROR', - message: `kind/version mismatch - expected: ${kind}/${version}, received: ${body.kind}/${body.apiVersion}`, - }); - } - switch (body.kind) { - case 'dsse': - return (0, dsse_1.verifyDSSETLogBody)(body, sigContent); - case 'intoto': - return (0, intoto_1.verifyIntotoTLogBody)(body, sigContent); - case 'hashedrekord': - return (0, hashedrekord_1.verifyHashedRekordTLogBody)(body, sigContent); - /* istanbul ignore next */ - default: - throw new error_1.VerificationError({ - code: 'TLOG_BODY_ERROR', - message: `unsupported kind: ${kind}`, - }); - } -} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/intoto.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/intoto.js deleted file mode 100644 index 9096ae9418cc3..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/intoto.js +++ /dev/null @@ -1,62 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.verifyIntotoTLogBody = verifyIntotoTLogBody; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const error_1 = require("../error"); -// Compare the given intoto tlog entry to the given bundle -function verifyIntotoTLogBody(tlogEntry, content) { - switch (tlogEntry.apiVersion) { - case '0.0.2': - return verifyIntoto002TLogBody(tlogEntry, content); - default: - throw new error_1.VerificationError({ - code: 'TLOG_BODY_ERROR', - message: `unsupported intoto version: ${tlogEntry.apiVersion}`, - }); - } -} -// Compare the given intoto v0.0.2 tlog entry to the given DSSE envelope. 
-function verifyIntoto002TLogBody(tlogEntry, content) { - // Ensure the bundle's DSSE contains a single signature - if (tlogEntry.spec.content.envelope.signatures?.length !== 1) { - throw new error_1.VerificationError({ - code: 'TLOG_BODY_ERROR', - message: 'signature count mismatch', - }); - } - // Signature is double-base64-encoded in the tlog entry - const tlogSig = base64Decode(tlogEntry.spec.content.envelope.signatures[0].sig); - // Ensure that the signature in the bundle's DSSE matches tlog entry - if (!content.compareSignature(Buffer.from(tlogSig, 'base64'))) { - throw new error_1.VerificationError({ - code: 'TLOG_BODY_ERROR', - message: 'tlog entry signature mismatch', - }); - } - // Ensure the digest of the bundle's DSSE payload matches the digest in the - // tlog entry - const tlogHash = tlogEntry.spec.content.payloadHash?.value || ''; - if (!content.compareDigest(Buffer.from(tlogHash, 'hex'))) { - throw new error_1.VerificationError({ - code: 'TLOG_BODY_ERROR', - message: 'DSSE payload hash mismatch', - }); - } -} -function base64Decode(str) { - return Buffer.from(str, 'base64').toString('utf-8'); -} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/filter.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/filter.js deleted file mode 100644 index 880a16cf1940e..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/filter.js +++ /dev/null @@ -1,23 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.filterCertAuthorities = filterCertAuthorities; -exports.filterTLogAuthorities = filterTLogAuthorities; -function filterCertAuthorities(certAuthorities, criteria) { - return certAuthorities.filter((ca) => { - return (ca.validFor.start <= criteria.start && ca.validFor.end >= criteria.end); - }); -} -// Filter the list of tlog instances to only those which match the given log -// ID and have public keys which are valid for the given integrated time. -function filterTLogAuthorities(tlogAuthorities, criteria) { - return tlogAuthorities.filter((tlog) => { - // If we're filtering by log ID and the log IDs don't match, we can't use - // this tlog - if (criteria.logID && !tlog.logID.equals(criteria.logID)) { - return false; - } - // Check that the integrated time is within the validFor range - return (tlog.validFor.start <= criteria.targetDate && - criteria.targetDate <= tlog.validFor.end); - }); -} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/index.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/index.js deleted file mode 100644 index bfab2eb4f9975..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/index.js +++ /dev/null @@ -1,86 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.filterTLogAuthorities = exports.filterCertAuthorities = void 0; -exports.toTrustMaterial = toTrustMaterial; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -const core_1 = require("@sigstore/core"); -const protobuf_specs_1 = require("@sigstore/protobuf-specs"); -const error_1 = require("../error"); -const BEGINNING_OF_TIME = new Date(0); -const END_OF_TIME = new Date(8640000000000000); -var filter_1 = require("./filter"); -Object.defineProperty(exports, "filterCertAuthorities", { enumerable: true, get: function () { return filter_1.filterCertAuthorities; } }); -Object.defineProperty(exports, "filterTLogAuthorities", { enumerable: true, get: function () { return filter_1.filterTLogAuthorities; } }); -function toTrustMaterial(root, keys) { - const keyFinder = typeof keys === 'function' ? keys : keyLocator(keys); - return { - certificateAuthorities: root.certificateAuthorities.map(createCertAuthority), - timestampAuthorities: root.timestampAuthorities.map(createCertAuthority), - tlogs: root.tlogs.map(createTLogAuthority), - ctlogs: root.ctlogs.map(createTLogAuthority), - publicKey: keyFinder, - }; -} -function createTLogAuthority(tlogInstance) { - const keyDetails = tlogInstance.publicKey.keyDetails; - const keyType = keyDetails === protobuf_specs_1.PublicKeyDetails.PKCS1_RSA_PKCS1V5 || - keyDetails === protobuf_specs_1.PublicKeyDetails.PKIX_RSA_PKCS1V5 || - keyDetails === protobuf_specs_1.PublicKeyDetails.PKIX_RSA_PKCS1V15_2048_SHA256 || - keyDetails === protobuf_specs_1.PublicKeyDetails.PKIX_RSA_PKCS1V15_3072_SHA256 || - keyDetails === protobuf_specs_1.PublicKeyDetails.PKIX_RSA_PKCS1V15_4096_SHA256 - ? 'pkcs1' - : 'spki'; - return { - logID: tlogInstance.logId.keyId, - publicKey: core_1.crypto.createPublicKey(tlogInstance.publicKey.rawBytes, keyType), - validFor: { - start: tlogInstance.publicKey.validFor?.start || BEGINNING_OF_TIME, - end: tlogInstance.publicKey.validFor?.end || END_OF_TIME, - }, - }; -} -function createCertAuthority(ca) { - /* istanbul ignore next */ - return { - certChain: ca.certChain.certificates.map((cert) => { - return core_1.X509Certificate.parse(cert.rawBytes); - }), - validFor: { - start: ca.validFor?.start || BEGINNING_OF_TIME, - end: ca.validFor?.end || END_OF_TIME, - }, - }; -} -function keyLocator(keys) { - return (hint) => { - const key = (keys || {})[hint]; - if (!key) { - throw new error_1.VerificationError({ - code: 'PUBLIC_KEY_ERROR', - message: `key not found: ${hint}`, - }); - } - return { - publicKey: core_1.crypto.createPublicKey(key.rawBytes), - validFor: (date) => { - /* istanbul ignore next */ - return ((key.validFor?.start || BEGINNING_OF_TIME) <= date && - (key.validFor?.end || END_OF_TIME) >= date); - }, - }; - }; -} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/trust.types.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/trust.types.js deleted file mode 100644 index c8ad2e549bdc6..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/trust.types.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/verifier.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/verifier.js deleted file mode 100644 index 829727cd1d40a..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/verify/dist/verifier.js +++ /dev/null @@ -1,141 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Verifier = void 0; -/* -Copyright 2023 The Sigstore Authors. 
- -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const util_1 = require("util"); -const error_1 = require("./error"); -const key_1 = require("./key"); -const policy_1 = require("./policy"); -const timestamp_1 = require("./timestamp"); -const tlog_1 = require("./tlog"); -class Verifier { - constructor(trustMaterial, options = {}) { - this.trustMaterial = trustMaterial; - this.options = { - ctlogThreshold: options.ctlogThreshold ?? 1, - tlogThreshold: options.tlogThreshold ?? 1, - tsaThreshold: options.tsaThreshold ?? 0, - }; - } - verify(entity, policy) { - const timestamps = this.verifyTimestamps(entity); - const signer = this.verifySigningKey(entity, timestamps); - this.verifyTLogs(entity); - this.verifySignature(entity, signer); - if (policy) { - this.verifyPolicy(policy, signer.identity || {}); - } - return signer; - } - // Checks that all of the timestamps in the entity are valid and returns them - verifyTimestamps(entity) { - let tlogCount = 0; - let tsaCount = 0; - const timestamps = entity.timestamps.map((timestamp) => { - switch (timestamp.$case) { - case 'timestamp-authority': - tsaCount++; - return (0, timestamp_1.verifyTSATimestamp)(timestamp.timestamp, entity.signature.signature, this.trustMaterial.timestampAuthorities); - case 'transparency-log': - tlogCount++; - return (0, timestamp_1.verifyTLogTimestamp)(timestamp.tlogEntry, this.trustMaterial.tlogs); - } - }); - // Check for duplicate timestamps - if (containsDupes(timestamps)) { - throw new error_1.VerificationError({ - code: 'TIMESTAMP_ERROR', - message: 'duplicate timestamp', - }); - } - if (tlogCount < this.options.tlogThreshold) { - throw new error_1.VerificationError({ - code: 'TIMESTAMP_ERROR', - message: `expected ${this.options.tlogThreshold} tlog timestamps, got ${tlogCount}`, - }); - } - if (tsaCount < this.options.tsaThreshold) { - throw new error_1.VerificationError({ - code: 'TIMESTAMP_ERROR', - message: `expected ${this.options.tsaThreshold} tsa timestamps, got ${tsaCount}`, - }); - } - return timestamps.map((t) => t.timestamp); - } - // Checks that the signing key is valid for all of the the supplied timestamps - // and returns the signer. 
- verifySigningKey({ key }, timestamps) { - switch (key.$case) { - case 'public-key': { - return (0, key_1.verifyPublicKey)(key.hint, timestamps, this.trustMaterial); - } - case 'certificate': { - const result = (0, key_1.verifyCertificate)(key.certificate, timestamps, this.trustMaterial); - /* istanbul ignore next - no fixture */ - if (containsDupes(result.scts)) { - throw new error_1.VerificationError({ - code: 'CERTIFICATE_ERROR', - message: 'duplicate SCT', - }); - } - if (result.scts.length < this.options.ctlogThreshold) { - throw new error_1.VerificationError({ - code: 'CERTIFICATE_ERROR', - message: `expected ${this.options.ctlogThreshold} SCTs, got ${result.scts.length}`, - }); - } - return result.signer; - } - } - } - // Checks that the tlog entries are valid for the supplied content - verifyTLogs({ signature: content, tlogEntries }) { - tlogEntries.forEach((entry) => (0, tlog_1.verifyTLogBody)(entry, content)); - } - // Checks that the signature is valid for the supplied content - verifySignature(entity, signer) { - if (!entity.signature.verifySignature(signer.key)) { - throw new error_1.VerificationError({ - code: 'SIGNATURE_ERROR', - message: 'signature verification failed', - }); - } - } - verifyPolicy(policy, identity) { - // Check the subject alternative name of the signer matches the policy - if (policy.subjectAlternativeName) { - (0, policy_1.verifySubjectAlternativeName)(policy.subjectAlternativeName, identity.subjectAlternativeName); - } - // Check that the extensions of the signer match the policy - if (policy.extensions) { - (0, policy_1.verifyExtensions)(policy.extensions, identity.extensions); - } - } -} -exports.Verifier = Verifier; -// Checks for duplicate items in the array. Objects are compared using -// deep equality. -function containsDupes(arr) { - for (let i = 0; i < arr.length; i++) { - for (let j = i + 1; j < arr.length; j++) { - if ((0, util_1.isDeepStrictEqual)(arr[i], arr[j])) { - return true; - } - } - } - return false; -} diff --git a/node_modules/pacote/node_modules/@sigstore/verify/package.json b/node_modules/pacote/node_modules/@sigstore/verify/package.json deleted file mode 100644 index edf72b8bfd968..0000000000000 --- a/node_modules/pacote/node_modules/@sigstore/verify/package.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "name": "@sigstore/verify", - "version": "2.0.0", - "description": "Verification of Sigstore signatures", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "scripts": { - "clean": "shx rm -rf dist *.tsbuildinfo", - "build": "tsc --build", - "test": "jest" - }, - "files": [ - "dist" - ], - "author": "bdehamer@github.com", - "license": "Apache-2.0", - "repository": { - "type": "git", - "url": "git+https://github.com/sigstore/sigstore-js.git" - }, - "bugs": { - "url": "https://github.com/sigstore/sigstore-js/issues" - }, - "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/verify#readme", - "publishConfig": { - "provenance": true - }, - "dependencies": { - "@sigstore/protobuf-specs": "^0.3.2", - "@sigstore/bundle": "^3.0.0", - "@sigstore/core": "^2.0.0" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } -} diff --git a/node_modules/pacote/node_modules/@tufjs/models/LICENSE b/node_modules/pacote/node_modules/@tufjs/models/LICENSE deleted file mode 100644 index 420700f5d3765..0000000000000 --- a/node_modules/pacote/node_modules/@tufjs/models/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022 GitHub and the TUF Contributors - -Permission is hereby granted, free of charge, to any 
person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/base.js b/node_modules/pacote/node_modules/@tufjs/models/dist/base.js deleted file mode 100644 index 85e45d8fc1151..0000000000000 --- a/node_modules/pacote/node_modules/@tufjs/models/dist/base.js +++ /dev/null @@ -1,92 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Signed = exports.MetadataKind = void 0; -exports.isMetadataKind = isMetadataKind; -const util_1 = __importDefault(require("util")); -const error_1 = require("./error"); -const utils_1 = require("./utils"); -const SPECIFICATION_VERSION = ['1', '0', '31']; -var MetadataKind; -(function (MetadataKind) { - MetadataKind["Root"] = "root"; - MetadataKind["Timestamp"] = "timestamp"; - MetadataKind["Snapshot"] = "snapshot"; - MetadataKind["Targets"] = "targets"; -})(MetadataKind || (exports.MetadataKind = MetadataKind = {})); -function isMetadataKind(value) { - return (typeof value === 'string' && - Object.values(MetadataKind).includes(value)); -} -/*** - * A base class for the signed part of TUF metadata. - * - * Objects with base class Signed are usually included in a ``Metadata`` object - * on the signed attribute. This class provides attributes and methods that - * are common for all TUF metadata types (roles). 
- */ -class Signed { - constructor(options) { - this.specVersion = options.specVersion || SPECIFICATION_VERSION.join('.'); - const specList = this.specVersion.split('.'); - if (!(specList.length === 2 || specList.length === 3) || - !specList.every((item) => isNumeric(item))) { - throw new error_1.ValueError('Failed to parse specVersion'); - } - // major version must match - if (specList[0] != SPECIFICATION_VERSION[0]) { - throw new error_1.ValueError('Unsupported specVersion'); - } - this.expires = options.expires; - this.version = options.version; - this.unrecognizedFields = options.unrecognizedFields || {}; - } - equals(other) { - if (!(other instanceof Signed)) { - return false; - } - return (this.specVersion === other.specVersion && - this.expires === other.expires && - this.version === other.version && - util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); - } - isExpired(referenceTime) { - if (!referenceTime) { - referenceTime = new Date(); - } - return referenceTime >= new Date(this.expires); - } - static commonFieldsFromJSON(data) { - const { spec_version, expires, version, ...rest } = data; - if (!utils_1.guard.isDefined(spec_version)) { - throw new error_1.ValueError('spec_version is not defined'); - } - else if (typeof spec_version !== 'string') { - throw new TypeError('spec_version must be a string'); - } - if (!utils_1.guard.isDefined(expires)) { - throw new error_1.ValueError('expires is not defined'); - } - else if (!(typeof expires === 'string')) { - throw new TypeError('expires must be a string'); - } - if (!utils_1.guard.isDefined(version)) { - throw new error_1.ValueError('version is not defined'); - } - else if (!(typeof version === 'number')) { - throw new TypeError('version must be a number'); - } - return { - specVersion: spec_version, - expires, - version, - unrecognizedFields: rest, - }; - } -} -exports.Signed = Signed; -function isNumeric(str) { - return !isNaN(Number(str)); -} diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/delegations.js b/node_modules/pacote/node_modules/@tufjs/models/dist/delegations.js deleted file mode 100644 index 7165f1e244393..0000000000000 --- a/node_modules/pacote/node_modules/@tufjs/models/dist/delegations.js +++ /dev/null @@ -1,115 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Delegations = void 0; -const util_1 = __importDefault(require("util")); -const error_1 = require("./error"); -const key_1 = require("./key"); -const role_1 = require("./role"); -const utils_1 = require("./utils"); -/** - * A container object storing information about all delegations. - * - * Targets roles that are trusted to provide signed metadata files - * describing targets with designated pathnames and/or further delegations. 
- */ -class Delegations { - constructor(options) { - this.keys = options.keys; - this.unrecognizedFields = options.unrecognizedFields || {}; - if (options.roles) { - if (Object.keys(options.roles).some((roleName) => role_1.TOP_LEVEL_ROLE_NAMES.includes(roleName))) { - throw new error_1.ValueError('Delegated role name conflicts with top-level role name'); - } - } - this.succinctRoles = options.succinctRoles; - this.roles = options.roles; - } - equals(other) { - if (!(other instanceof Delegations)) { - return false; - } - return (util_1.default.isDeepStrictEqual(this.keys, other.keys) && - util_1.default.isDeepStrictEqual(this.roles, other.roles) && - util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields) && - util_1.default.isDeepStrictEqual(this.succinctRoles, other.succinctRoles)); - } - *rolesForTarget(targetPath) { - if (this.roles) { - for (const role of Object.values(this.roles)) { - if (role.isDelegatedPath(targetPath)) { - yield { role: role.name, terminating: role.terminating }; - } - } - } - else if (this.succinctRoles) { - yield { - role: this.succinctRoles.getRoleForTarget(targetPath), - terminating: true, - }; - } - } - toJSON() { - const json = { - keys: keysToJSON(this.keys), - ...this.unrecognizedFields, - }; - if (this.roles) { - json.roles = rolesToJSON(this.roles); - } - else if (this.succinctRoles) { - json.succinct_roles = this.succinctRoles.toJSON(); - } - return json; - } - static fromJSON(data) { - const { keys, roles, succinct_roles, ...unrecognizedFields } = data; - let succinctRoles; - if (utils_1.guard.isObject(succinct_roles)) { - succinctRoles = role_1.SuccinctRoles.fromJSON(succinct_roles); - } - return new Delegations({ - keys: keysFromJSON(keys), - roles: rolesFromJSON(roles), - unrecognizedFields, - succinctRoles, - }); - } -} -exports.Delegations = Delegations; -function keysToJSON(keys) { - return Object.entries(keys).reduce((acc, [keyId, key]) => ({ - ...acc, - [keyId]: key.toJSON(), - }), {}); -} -function rolesToJSON(roles) { - return Object.values(roles).map((role) => role.toJSON()); -} -function keysFromJSON(data) { - if (!utils_1.guard.isObjectRecord(data)) { - throw new TypeError('keys is malformed'); - } - return Object.entries(data).reduce((acc, [keyID, keyData]) => ({ - ...acc, - [keyID]: key_1.Key.fromJSON(keyID, keyData), - }), {}); -} -function rolesFromJSON(data) { - let roleMap; - if (utils_1.guard.isDefined(data)) { - if (!utils_1.guard.isObjectArray(data)) { - throw new TypeError('roles is malformed'); - } - roleMap = data.reduce((acc, role) => { - const delegatedRole = role_1.DelegatedRole.fromJSON(role); - return { - ...acc, - [delegatedRole.name]: delegatedRole, - }; - }, {}); - } - return roleMap; -} diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/error.js b/node_modules/pacote/node_modules/@tufjs/models/dist/error.js deleted file mode 100644 index ba80698747ba0..0000000000000 --- a/node_modules/pacote/node_modules/@tufjs/models/dist/error.js +++ /dev/null @@ -1,27 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.UnsupportedAlgorithmError = exports.CryptoError = exports.LengthOrHashMismatchError = exports.UnsignedMetadataError = exports.RepositoryError = exports.ValueError = void 0; -// An error about insufficient values -class ValueError extends Error { -} -exports.ValueError = ValueError; -// An error with a repository's state, such as a missing file. 
-// It covers all exceptions that come from the repository side when -// looking from the perspective of users of metadata API or ngclient. -class RepositoryError extends Error { -} -exports.RepositoryError = RepositoryError; -// An error about metadata object with insufficient threshold of signatures. -class UnsignedMetadataError extends RepositoryError { -} -exports.UnsignedMetadataError = UnsignedMetadataError; -// An error while checking the length and hash values of an object. -class LengthOrHashMismatchError extends RepositoryError { -} -exports.LengthOrHashMismatchError = LengthOrHashMismatchError; -class CryptoError extends Error { -} -exports.CryptoError = CryptoError; -class UnsupportedAlgorithmError extends CryptoError { -} -exports.UnsupportedAlgorithmError = UnsupportedAlgorithmError; diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/file.js b/node_modules/pacote/node_modules/@tufjs/models/dist/file.js deleted file mode 100644 index b35fe5950bbb7..0000000000000 --- a/node_modules/pacote/node_modules/@tufjs/models/dist/file.js +++ /dev/null @@ -1,183 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.TargetFile = exports.MetaFile = void 0; -const crypto_1 = __importDefault(require("crypto")); -const util_1 = __importDefault(require("util")); -const error_1 = require("./error"); -const utils_1 = require("./utils"); -// A container with information about a particular metadata file. -// -// This class is used for Timestamp and Snapshot metadata. -class MetaFile { - constructor(opts) { - if (opts.version <= 0) { - throw new error_1.ValueError('Metafile version must be at least 1'); - } - if (opts.length !== undefined) { - validateLength(opts.length); - } - this.version = opts.version; - this.length = opts.length; - this.hashes = opts.hashes; - this.unrecognizedFields = opts.unrecognizedFields || {}; - } - equals(other) { - if (!(other instanceof MetaFile)) { - return false; - } - return (this.version === other.version && - this.length === other.length && - util_1.default.isDeepStrictEqual(this.hashes, other.hashes) && - util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); - } - verify(data) { - // Verifies that the given data matches the expected length. - if (this.length !== undefined) { - if (data.length !== this.length) { - throw new error_1.LengthOrHashMismatchError(`Expected length ${this.length} but got ${data.length}`); - } - } - // Verifies that the given data matches the supplied hashes. 
- if (this.hashes) { - Object.entries(this.hashes).forEach(([key, value]) => { - let hash; - try { - hash = crypto_1.default.createHash(key); - } - catch (e) { - throw new error_1.LengthOrHashMismatchError(`Hash algorithm ${key} not supported`); - } - const observedHash = hash.update(data).digest('hex'); - if (observedHash !== value) { - throw new error_1.LengthOrHashMismatchError(`Expected hash ${value} but got ${observedHash}`); - } - }); - } - } - toJSON() { - const json = { - version: this.version, - ...this.unrecognizedFields, - }; - if (this.length !== undefined) { - json.length = this.length; - } - if (this.hashes) { - json.hashes = this.hashes; - } - return json; - } - static fromJSON(data) { - const { version, length, hashes, ...rest } = data; - if (typeof version !== 'number') { - throw new TypeError('version must be a number'); - } - if (utils_1.guard.isDefined(length) && typeof length !== 'number') { - throw new TypeError('length must be a number'); - } - if (utils_1.guard.isDefined(hashes) && !utils_1.guard.isStringRecord(hashes)) { - throw new TypeError('hashes must be string keys and values'); - } - return new MetaFile({ - version, - length, - hashes, - unrecognizedFields: rest, - }); - } -} -exports.MetaFile = MetaFile; -// Container for info about a particular target file. -// -// This class is used for Target metadata. -class TargetFile { - constructor(opts) { - validateLength(opts.length); - this.length = opts.length; - this.path = opts.path; - this.hashes = opts.hashes; - this.unrecognizedFields = opts.unrecognizedFields || {}; - } - get custom() { - const custom = this.unrecognizedFields['custom']; - if (!custom || Array.isArray(custom) || !(typeof custom === 'object')) { - return {}; - } - return custom; - } - equals(other) { - if (!(other instanceof TargetFile)) { - return false; - } - return (this.length === other.length && - this.path === other.path && - util_1.default.isDeepStrictEqual(this.hashes, other.hashes) && - util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); - } - async verify(stream) { - let observedLength = 0; - // Create a digest for each hash algorithm - const digests = Object.keys(this.hashes).reduce((acc, key) => { - try { - acc[key] = crypto_1.default.createHash(key); - } - catch (e) { - throw new error_1.LengthOrHashMismatchError(`Hash algorithm ${key} not supported`); - } - return acc; - }, {}); - // Read stream chunk by chunk - for await (const chunk of stream) { - // Keep running tally of stream length - observedLength += chunk.length; - // Append chunk to each digest - Object.values(digests).forEach((digest) => { - digest.update(chunk); - }); - } - // Verify length matches expected value - if (observedLength !== this.length) { - throw new error_1.LengthOrHashMismatchError(`Expected length ${this.length} but got ${observedLength}`); - } - // Verify each digest matches expected value - Object.entries(digests).forEach(([key, value]) => { - const expected = this.hashes[key]; - const actual = value.digest('hex'); - if (actual !== expected) { - throw new error_1.LengthOrHashMismatchError(`Expected hash ${expected} but got ${actual}`); - } - }); - } - toJSON() { - return { - length: this.length, - hashes: this.hashes, - ...this.unrecognizedFields, - }; - } - static fromJSON(path, data) { - const { length, hashes, ...rest } = data; - if (typeof length !== 'number') { - throw new TypeError('length must be a number'); - } - if (!utils_1.guard.isStringRecord(hashes)) { - throw new TypeError('hashes must have string keys 
and values'); - } - return new TargetFile({ - length, - path, - hashes, - unrecognizedFields: rest, - }); - } -} -exports.TargetFile = TargetFile; -// Check that supplied length if valid -function validateLength(length) { - if (length < 0) { - throw new error_1.ValueError('Length must be at least 0'); - } -} diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/index.js b/node_modules/pacote/node_modules/@tufjs/models/dist/index.js deleted file mode 100644 index a4dc783659f04..0000000000000 --- a/node_modules/pacote/node_modules/@tufjs/models/dist/index.js +++ /dev/null @@ -1,24 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Timestamp = exports.Targets = exports.Snapshot = exports.Signature = exports.Root = exports.Metadata = exports.Key = exports.TargetFile = exports.MetaFile = exports.ValueError = exports.MetadataKind = void 0; -var base_1 = require("./base"); -Object.defineProperty(exports, "MetadataKind", { enumerable: true, get: function () { return base_1.MetadataKind; } }); -var error_1 = require("./error"); -Object.defineProperty(exports, "ValueError", { enumerable: true, get: function () { return error_1.ValueError; } }); -var file_1 = require("./file"); -Object.defineProperty(exports, "MetaFile", { enumerable: true, get: function () { return file_1.MetaFile; } }); -Object.defineProperty(exports, "TargetFile", { enumerable: true, get: function () { return file_1.TargetFile; } }); -var key_1 = require("./key"); -Object.defineProperty(exports, "Key", { enumerable: true, get: function () { return key_1.Key; } }); -var metadata_1 = require("./metadata"); -Object.defineProperty(exports, "Metadata", { enumerable: true, get: function () { return metadata_1.Metadata; } }); -var root_1 = require("./root"); -Object.defineProperty(exports, "Root", { enumerable: true, get: function () { return root_1.Root; } }); -var signature_1 = require("./signature"); -Object.defineProperty(exports, "Signature", { enumerable: true, get: function () { return signature_1.Signature; } }); -var snapshot_1 = require("./snapshot"); -Object.defineProperty(exports, "Snapshot", { enumerable: true, get: function () { return snapshot_1.Snapshot; } }); -var targets_1 = require("./targets"); -Object.defineProperty(exports, "Targets", { enumerable: true, get: function () { return targets_1.Targets; } }); -var timestamp_1 = require("./timestamp"); -Object.defineProperty(exports, "Timestamp", { enumerable: true, get: function () { return timestamp_1.Timestamp; } }); diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/key.js b/node_modules/pacote/node_modules/@tufjs/models/dist/key.js deleted file mode 100644 index 5e55b09d7c6dd..0000000000000 --- a/node_modules/pacote/node_modules/@tufjs/models/dist/key.js +++ /dev/null @@ -1,85 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Key = void 0; -const util_1 = __importDefault(require("util")); -const error_1 = require("./error"); -const utils_1 = require("./utils"); -const key_1 = require("./utils/key"); -// A container class representing the public portion of a Key. 
-class Key { - constructor(options) { - const { keyID, keyType, scheme, keyVal, unrecognizedFields } = options; - this.keyID = keyID; - this.keyType = keyType; - this.scheme = scheme; - this.keyVal = keyVal; - this.unrecognizedFields = unrecognizedFields || {}; - } - // Verifies the that the metadata.signatures contains a signature made with - // this key and is correctly signed. - verifySignature(metadata) { - const signature = metadata.signatures[this.keyID]; - if (!signature) - throw new error_1.UnsignedMetadataError('no signature for key found in metadata'); - if (!this.keyVal.public) - throw new error_1.UnsignedMetadataError('no public key found'); - const publicKey = (0, key_1.getPublicKey)({ - keyType: this.keyType, - scheme: this.scheme, - keyVal: this.keyVal.public, - }); - const signedData = metadata.signed.toJSON(); - try { - if (!utils_1.crypto.verifySignature(signedData, publicKey, signature.sig)) { - throw new error_1.UnsignedMetadataError(`failed to verify ${this.keyID} signature`); - } - } - catch (error) { - if (error instanceof error_1.UnsignedMetadataError) { - throw error; - } - throw new error_1.UnsignedMetadataError(`failed to verify ${this.keyID} signature`); - } - } - equals(other) { - if (!(other instanceof Key)) { - return false; - } - return (this.keyID === other.keyID && - this.keyType === other.keyType && - this.scheme === other.scheme && - util_1.default.isDeepStrictEqual(this.keyVal, other.keyVal) && - util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); - } - toJSON() { - return { - keytype: this.keyType, - scheme: this.scheme, - keyval: this.keyVal, - ...this.unrecognizedFields, - }; - } - static fromJSON(keyID, data) { - const { keytype, scheme, keyval, ...rest } = data; - if (typeof keytype !== 'string') { - throw new TypeError('keytype must be a string'); - } - if (typeof scheme !== 'string') { - throw new TypeError('scheme must be a string'); - } - if (!utils_1.guard.isStringRecord(keyval)) { - throw new TypeError('keyval must be a string record'); - } - return new Key({ - keyID, - keyType: keytype, - scheme, - keyVal: keyval, - unrecognizedFields: rest, - }); - } -} -exports.Key = Key; diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/metadata.js b/node_modules/pacote/node_modules/@tufjs/models/dist/metadata.js deleted file mode 100644 index 389d2504e0b53..0000000000000 --- a/node_modules/pacote/node_modules/@tufjs/models/dist/metadata.js +++ /dev/null @@ -1,160 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Metadata = void 0; -const canonical_json_1 = require("@tufjs/canonical-json"); -const util_1 = __importDefault(require("util")); -const base_1 = require("./base"); -const error_1 = require("./error"); -const root_1 = require("./root"); -const signature_1 = require("./signature"); -const snapshot_1 = require("./snapshot"); -const targets_1 = require("./targets"); -const timestamp_1 = require("./timestamp"); -const utils_1 = require("./utils"); -/*** - * A container for signed TUF metadata. - * - * Provides methods to convert to and from json, read and write to and - * from JSON and to create and verify metadata signatures. - * - * ``Metadata[T]`` is a generic container type where T can be any one type of - * [``Root``, ``Timestamp``, ``Snapshot``, ``Targets``]. 
The purpose of this - * is to allow static type checking of the signed attribute in code using - * Metadata:: - * - * root_md = Metadata[Root].fromJSON("root.json") - * # root_md type is now Metadata[Root]. This means signed and its - * # attributes like consistent_snapshot are now statically typed and the - * # types can be verified by static type checkers and shown by IDEs - * - * Using a type constraint is not required but not doing so means T is not a - * specific type so static typing cannot happen. Note that the type constraint - * ``[Root]`` is not validated at runtime (as pure annotations are not available - * then). - * - * Apart from ``expires`` all of the arguments to the inner constructors have - * reasonable default values for new metadata. - */ -class Metadata { - constructor(signed, signatures, unrecognizedFields) { - this.signed = signed; - this.signatures = signatures || {}; - this.unrecognizedFields = unrecognizedFields || {}; - } - sign(signer, append = true) { - const bytes = Buffer.from((0, canonical_json_1.canonicalize)(this.signed.toJSON())); - const signature = signer(bytes); - if (!append) { - this.signatures = {}; - } - this.signatures[signature.keyID] = signature; - } - verifyDelegate(delegatedRole, delegatedMetadata) { - let role; - let keys = {}; - switch (this.signed.type) { - case base_1.MetadataKind.Root: - keys = this.signed.keys; - role = this.signed.roles[delegatedRole]; - break; - case base_1.MetadataKind.Targets: - if (!this.signed.delegations) { - throw new error_1.ValueError(`No delegations found for ${delegatedRole}`); - } - keys = this.signed.delegations.keys; - if (this.signed.delegations.roles) { - role = this.signed.delegations.roles[delegatedRole]; - } - else if (this.signed.delegations.succinctRoles) { - if (this.signed.delegations.succinctRoles.isDelegatedRole(delegatedRole)) { - role = this.signed.delegations.succinctRoles; - } - } - break; - default: - throw new TypeError('invalid metadata type'); - } - if (!role) { - throw new error_1.ValueError(`no delegation found for ${delegatedRole}`); - } - const signingKeys = new Set(); - role.keyIDs.forEach((keyID) => { - const key = keys[keyID]; - // If we dont' have the key, continue checking other keys - if (!key) { - return; - } - try { - key.verifySignature(delegatedMetadata); - signingKeys.add(key.keyID); - } - catch (error) { - // continue - } - }); - if (signingKeys.size < role.threshold) { - throw new error_1.UnsignedMetadataError(`${delegatedRole} was signed by ${signingKeys.size}/${role.threshold} keys`); - } - } - equals(other) { - if (!(other instanceof Metadata)) { - return false; - } - return (this.signed.equals(other.signed) && - util_1.default.isDeepStrictEqual(this.signatures, other.signatures) && - util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); - } - toJSON() { - const signatures = Object.values(this.signatures).map((signature) => { - return signature.toJSON(); - }); - return { - signatures, - signed: this.signed.toJSON(), - ...this.unrecognizedFields, - }; - } - static fromJSON(type, data) { - const { signed, signatures, ...rest } = data; - if (!utils_1.guard.isDefined(signed) || !utils_1.guard.isObject(signed)) { - throw new TypeError('signed is not defined'); - } - if (type !== signed._type) { - throw new error_1.ValueError(`expected '${type}', got ${signed['_type']}`); - } - if (!utils_1.guard.isObjectArray(signatures)) { - throw new TypeError('signatures is not an array'); - } - let signedObj; - switch (type) { - case 
base_1.MetadataKind.Root: - signedObj = root_1.Root.fromJSON(signed); - break; - case base_1.MetadataKind.Timestamp: - signedObj = timestamp_1.Timestamp.fromJSON(signed); - break; - case base_1.MetadataKind.Snapshot: - signedObj = snapshot_1.Snapshot.fromJSON(signed); - break; - case base_1.MetadataKind.Targets: - signedObj = targets_1.Targets.fromJSON(signed); - break; - default: - throw new TypeError('invalid metadata type'); - } - const sigMap = {}; - // Ensure that each signature is unique - signatures.forEach((sigData) => { - const sig = signature_1.Signature.fromJSON(sigData); - if (sigMap[sig.keyID]) { - throw new error_1.ValueError(`multiple signatures found for keyid: ${sig.keyID}`); - } - sigMap[sig.keyID] = sig; - }); - return new Metadata(signedObj, sigMap, rest); - } -} -exports.Metadata = Metadata; diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/role.js b/node_modules/pacote/node_modules/@tufjs/models/dist/role.js deleted file mode 100644 index f7ddbc6fe3f38..0000000000000 --- a/node_modules/pacote/node_modules/@tufjs/models/dist/role.js +++ /dev/null @@ -1,299 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.SuccinctRoles = exports.DelegatedRole = exports.Role = exports.TOP_LEVEL_ROLE_NAMES = void 0; -const crypto_1 = __importDefault(require("crypto")); -const minimatch_1 = require("minimatch"); -const util_1 = __importDefault(require("util")); -const error_1 = require("./error"); -const utils_1 = require("./utils"); -exports.TOP_LEVEL_ROLE_NAMES = [ - 'root', - 'targets', - 'snapshot', - 'timestamp', -]; -/** - * Container that defines which keys are required to sign roles metadata. - * - * Role defines how many keys are required to successfully sign the roles - * metadata, and which keys are accepted. - */ -class Role { - constructor(options) { - const { keyIDs, threshold, unrecognizedFields } = options; - if (hasDuplicates(keyIDs)) { - throw new error_1.ValueError('duplicate key IDs found'); - } - if (threshold < 1) { - throw new error_1.ValueError('threshold must be at least 1'); - } - this.keyIDs = keyIDs; - this.threshold = threshold; - this.unrecognizedFields = unrecognizedFields || {}; - } - equals(other) { - if (!(other instanceof Role)) { - return false; - } - return (this.threshold === other.threshold && - util_1.default.isDeepStrictEqual(this.keyIDs, other.keyIDs) && - util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); - } - toJSON() { - return { - keyids: this.keyIDs, - threshold: this.threshold, - ...this.unrecognizedFields, - }; - } - static fromJSON(data) { - const { keyids, threshold, ...rest } = data; - if (!utils_1.guard.isStringArray(keyids)) { - throw new TypeError('keyids must be an array'); - } - if (typeof threshold !== 'number') { - throw new TypeError('threshold must be a number'); - } - return new Role({ - keyIDs: keyids, - threshold, - unrecognizedFields: rest, - }); - } -} -exports.Role = Role; -function hasDuplicates(array) { - return new Set(array).size !== array.length; -} -/** - * A container with information about a delegated role. 
- * - * A delegation can happen in two ways: - * - ``paths`` is set: delegates targets matching any path pattern in ``paths`` - * - ``pathHashPrefixes`` is set: delegates targets whose target path hash - * starts with any of the prefixes in ``pathHashPrefixes`` - * - * ``paths`` and ``pathHashPrefixes`` are mutually exclusive: both cannot be - * set, at least one of them must be set. - */ -class DelegatedRole extends Role { - constructor(opts) { - super(opts); - const { name, terminating, paths, pathHashPrefixes } = opts; - this.name = name; - this.terminating = terminating; - if (opts.paths && opts.pathHashPrefixes) { - throw new error_1.ValueError('paths and pathHashPrefixes are mutually exclusive'); - } - this.paths = paths; - this.pathHashPrefixes = pathHashPrefixes; - } - equals(other) { - if (!(other instanceof DelegatedRole)) { - return false; - } - return (super.equals(other) && - this.name === other.name && - this.terminating === other.terminating && - util_1.default.isDeepStrictEqual(this.paths, other.paths) && - util_1.default.isDeepStrictEqual(this.pathHashPrefixes, other.pathHashPrefixes)); - } - isDelegatedPath(targetFilepath) { - if (this.paths) { - return this.paths.some((pathPattern) => isTargetInPathPattern(targetFilepath, pathPattern)); - } - if (this.pathHashPrefixes) { - const hasher = crypto_1.default.createHash('sha256'); - const pathHash = hasher.update(targetFilepath).digest('hex'); - return this.pathHashPrefixes.some((pathHashPrefix) => pathHash.startsWith(pathHashPrefix)); - } - return false; - } - toJSON() { - const json = { - ...super.toJSON(), - name: this.name, - terminating: this.terminating, - }; - if (this.paths) { - json.paths = this.paths; - } - if (this.pathHashPrefixes) { - json.path_hash_prefixes = this.pathHashPrefixes; - } - return json; - } - static fromJSON(data) { - const { keyids, threshold, name, terminating, paths, path_hash_prefixes, ...rest } = data; - if (!utils_1.guard.isStringArray(keyids)) { - throw new TypeError('keyids must be an array of strings'); - } - if (typeof threshold !== 'number') { - throw new TypeError('threshold must be a number'); - } - if (typeof name !== 'string') { - throw new TypeError('name must be a string'); - } - if (typeof terminating !== 'boolean') { - throw new TypeError('terminating must be a boolean'); - } - if (utils_1.guard.isDefined(paths) && !utils_1.guard.isStringArray(paths)) { - throw new TypeError('paths must be an array of strings'); - } - if (utils_1.guard.isDefined(path_hash_prefixes) && - !utils_1.guard.isStringArray(path_hash_prefixes)) { - throw new TypeError('path_hash_prefixes must be an array of strings'); - } - return new DelegatedRole({ - keyIDs: keyids, - threshold, - name, - terminating, - paths, - pathHashPrefixes: path_hash_prefixes, - unrecognizedFields: rest, - }); - } -} -exports.DelegatedRole = DelegatedRole; -// JS version of Ruby's Array#zip -const zip = (a, b) => a.map((k, i) => [k, b[i]]); -function isTargetInPathPattern(target, pattern) { - const targetParts = target.split('/'); - const patternParts = pattern.split('/'); - if (patternParts.length != targetParts.length) { - return false; - } - return zip(targetParts, patternParts).every(([targetPart, patternPart]) => (0, minimatch_1.minimatch)(targetPart, patternPart)); -} -/** - * Succinctly defines a hash bin delegation graph. - * - * A ``SuccinctRoles`` object describes a delegation graph that covers all - * targets, distributing them uniformly over the delegated roles (i.e. bins) - * in the graph. 
- * - * The total number of bins is 2 to the power of the passed ``bit_length``. - * - * Bin names are the concatenation of the passed ``name_prefix`` and a - * zero-padded hex representation of the bin index separated by a hyphen. - * - * The passed ``keyids`` and ``threshold`` is used for each bin, and each bin - * is 'terminating'. - * - * For details: https://github.com/theupdateframework/taps/blob/master/tap15.md - */ -class SuccinctRoles extends Role { - constructor(opts) { - super(opts); - const { bitLength, namePrefix } = opts; - if (bitLength <= 0 || bitLength > 32) { - throw new error_1.ValueError('bitLength must be between 1 and 32'); - } - this.bitLength = bitLength; - this.namePrefix = namePrefix; - // Calculate the suffix_len value based on the total number of bins in - // hex. If bit_length = 10 then number_of_bins = 1024 or bin names will - // have a suffix between "000" and "3ff" in hex and suffix_len will be 3 - // meaning the third bin will have a suffix of "003". - this.numberOfBins = Math.pow(2, bitLength); - // suffix_len is calculated based on "number_of_bins - 1" as the name - // of the last bin contains the number "number_of_bins -1" as a suffix. - this.suffixLen = (this.numberOfBins - 1).toString(16).length; - } - equals(other) { - if (!(other instanceof SuccinctRoles)) { - return false; - } - return (super.equals(other) && - this.bitLength === other.bitLength && - this.namePrefix === other.namePrefix); - } - /*** - * Calculates the name of the delegated role responsible for 'target_filepath'. - * - * The target at path ''target_filepath' is assigned to a bin by casting - * the left-most 'bit_length' of bits of the file path hash digest to - * int, using it as bin index between 0 and '2**bit_length - 1'. - * - * Args: - * target_filepath: URL path to a target file, relative to a base - * targets URL. - */ - getRoleForTarget(targetFilepath) { - const hasher = crypto_1.default.createHash('sha256'); - const hasherBuffer = hasher.update(targetFilepath).digest(); - // can't ever need more than 4 bytes (32 bits). - const hashBytes = hasherBuffer.subarray(0, 4); - // Right shift hash bytes, so that we only have the leftmost - // bit_length bits that we care about. - const shiftValue = 32 - this.bitLength; - const binNumber = hashBytes.readUInt32BE() >>> shiftValue; - // Add zero padding if necessary and cast to hex the suffix. - const suffix = binNumber.toString(16).padStart(this.suffixLen, '0'); - return `${this.namePrefix}-${suffix}`; - } - *getRoles() { - for (let i = 0; i < this.numberOfBins; i++) { - const suffix = i.toString(16).padStart(this.suffixLen, '0'); - yield `${this.namePrefix}-${suffix}`; - } - } - /*** - * Determines whether the given ``role_name`` is in one of - * the delegated roles that ``SuccinctRoles`` represents. - * - * Args: - * role_name: The name of the role to check against. 
- */ - isDelegatedRole(roleName) { - const desiredPrefix = this.namePrefix + '-'; - if (!roleName.startsWith(desiredPrefix)) { - return false; - } - const suffix = roleName.slice(desiredPrefix.length, roleName.length); - if (suffix.length != this.suffixLen) { - return false; - } - // make sure the suffix is a hex string - if (!suffix.match(/^[0-9a-fA-F]+$/)) { - return false; - } - const num = parseInt(suffix, 16); - return 0 <= num && num < this.numberOfBins; - } - toJSON() { - const json = { - ...super.toJSON(), - bit_length: this.bitLength, - name_prefix: this.namePrefix, - }; - return json; - } - static fromJSON(data) { - const { keyids, threshold, bit_length, name_prefix, ...rest } = data; - if (!utils_1.guard.isStringArray(keyids)) { - throw new TypeError('keyids must be an array of strings'); - } - if (typeof threshold !== 'number') { - throw new TypeError('threshold must be a number'); - } - if (typeof bit_length !== 'number') { - throw new TypeError('bit_length must be a number'); - } - if (typeof name_prefix !== 'string') { - throw new TypeError('name_prefix must be a string'); - } - return new SuccinctRoles({ - keyIDs: keyids, - threshold, - bitLength: bit_length, - namePrefix: name_prefix, - unrecognizedFields: rest, - }); - } -} -exports.SuccinctRoles = SuccinctRoles; diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/root.js b/node_modules/pacote/node_modules/@tufjs/models/dist/root.js deleted file mode 100644 index 36d0ef0f186d1..0000000000000 --- a/node_modules/pacote/node_modules/@tufjs/models/dist/root.js +++ /dev/null @@ -1,116 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Root = void 0; -const util_1 = __importDefault(require("util")); -const base_1 = require("./base"); -const error_1 = require("./error"); -const key_1 = require("./key"); -const role_1 = require("./role"); -const utils_1 = require("./utils"); -/** - * A container for the signed part of root metadata. - * - * The top-level role and metadata file signed by the root keys. - * This role specifies trusted keys for all other top-level roles, which may further delegate trust. - */ -class Root extends base_1.Signed { - constructor(options) { - super(options); - this.type = base_1.MetadataKind.Root; - this.keys = options.keys || {}; - this.consistentSnapshot = options.consistentSnapshot ?? 
true; - if (!options.roles) { - this.roles = role_1.TOP_LEVEL_ROLE_NAMES.reduce((acc, role) => ({ - ...acc, - [role]: new role_1.Role({ keyIDs: [], threshold: 1 }), - }), {}); - } - else { - const roleNames = new Set(Object.keys(options.roles)); - if (!role_1.TOP_LEVEL_ROLE_NAMES.every((role) => roleNames.has(role))) { - throw new error_1.ValueError('missing top-level role'); - } - this.roles = options.roles; - } - } - addKey(key, role) { - if (!this.roles[role]) { - throw new error_1.ValueError(`role ${role} does not exist`); - } - if (!this.roles[role].keyIDs.includes(key.keyID)) { - this.roles[role].keyIDs.push(key.keyID); - } - this.keys[key.keyID] = key; - } - equals(other) { - if (!(other instanceof Root)) { - return false; - } - return (super.equals(other) && - this.consistentSnapshot === other.consistentSnapshot && - util_1.default.isDeepStrictEqual(this.keys, other.keys) && - util_1.default.isDeepStrictEqual(this.roles, other.roles)); - } - toJSON() { - return { - _type: this.type, - spec_version: this.specVersion, - version: this.version, - expires: this.expires, - keys: keysToJSON(this.keys), - roles: rolesToJSON(this.roles), - consistent_snapshot: this.consistentSnapshot, - ...this.unrecognizedFields, - }; - } - static fromJSON(data) { - const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data); - const { keys, roles, consistent_snapshot, ...rest } = unrecognizedFields; - if (typeof consistent_snapshot !== 'boolean') { - throw new TypeError('consistent_snapshot must be a boolean'); - } - return new Root({ - ...commonFields, - keys: keysFromJSON(keys), - roles: rolesFromJSON(roles), - consistentSnapshot: consistent_snapshot, - unrecognizedFields: rest, - }); - } -} -exports.Root = Root; -function keysToJSON(keys) { - return Object.entries(keys).reduce((acc, [keyID, key]) => ({ ...acc, [keyID]: key.toJSON() }), {}); -} -function rolesToJSON(roles) { - return Object.entries(roles).reduce((acc, [roleName, role]) => ({ ...acc, [roleName]: role.toJSON() }), {}); -} -function keysFromJSON(data) { - let keys; - if (utils_1.guard.isDefined(data)) { - if (!utils_1.guard.isObjectRecord(data)) { - throw new TypeError('keys must be an object'); - } - keys = Object.entries(data).reduce((acc, [keyID, keyData]) => ({ - ...acc, - [keyID]: key_1.Key.fromJSON(keyID, keyData), - }), {}); - } - return keys; -} -function rolesFromJSON(data) { - let roles; - if (utils_1.guard.isDefined(data)) { - if (!utils_1.guard.isObjectRecord(data)) { - throw new TypeError('roles must be an object'); - } - roles = Object.entries(data).reduce((acc, [roleName, roleData]) => ({ - ...acc, - [roleName]: role_1.Role.fromJSON(roleData), - }), {}); - } - return roles; -} diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/signature.js b/node_modules/pacote/node_modules/@tufjs/models/dist/signature.js deleted file mode 100644 index 33eb204eb0835..0000000000000 --- a/node_modules/pacote/node_modules/@tufjs/models/dist/signature.js +++ /dev/null @@ -1,38 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Signature = void 0; -/** - * A container class containing information about a signature. - * - * Contains a signature and the keyid uniquely identifying the key used - * to generate the signature. - * - * Provide a `fromJSON` method to create a Signature from a JSON object. 
- */ -class Signature { - constructor(options) { - const { keyID, sig } = options; - this.keyID = keyID; - this.sig = sig; - } - toJSON() { - return { - keyid: this.keyID, - sig: this.sig, - }; - } - static fromJSON(data) { - const { keyid, sig } = data; - if (typeof keyid !== 'string') { - throw new TypeError('keyid must be a string'); - } - if (typeof sig !== 'string') { - throw new TypeError('sig must be a string'); - } - return new Signature({ - keyID: keyid, - sig: sig, - }); - } -} -exports.Signature = Signature; diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/snapshot.js b/node_modules/pacote/node_modules/@tufjs/models/dist/snapshot.js deleted file mode 100644 index e90ea8e729e4e..0000000000000 --- a/node_modules/pacote/node_modules/@tufjs/models/dist/snapshot.js +++ /dev/null @@ -1,71 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Snapshot = void 0; -const util_1 = __importDefault(require("util")); -const base_1 = require("./base"); -const file_1 = require("./file"); -const utils_1 = require("./utils"); -/** - * A container for the signed part of snapshot metadata. - * - * Snapshot contains information about all target Metadata files. - * A top-level role that specifies the latest versions of all targets metadata files, - * and hence the latest versions of all targets (including any dependencies between them) on the repository. - */ -class Snapshot extends base_1.Signed { - constructor(opts) { - super(opts); - this.type = base_1.MetadataKind.Snapshot; - this.meta = opts.meta || { 'targets.json': new file_1.MetaFile({ version: 1 }) }; - } - equals(other) { - if (!(other instanceof Snapshot)) { - return false; - } - return super.equals(other) && util_1.default.isDeepStrictEqual(this.meta, other.meta); - } - toJSON() { - return { - _type: this.type, - meta: metaToJSON(this.meta), - spec_version: this.specVersion, - version: this.version, - expires: this.expires, - ...this.unrecognizedFields, - }; - } - static fromJSON(data) { - const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data); - const { meta, ...rest } = unrecognizedFields; - return new Snapshot({ - ...commonFields, - meta: metaFromJSON(meta), - unrecognizedFields: rest, - }); - } -} -exports.Snapshot = Snapshot; -function metaToJSON(meta) { - return Object.entries(meta).reduce((acc, [path, metadata]) => ({ - ...acc, - [path]: metadata.toJSON(), - }), {}); -} -function metaFromJSON(data) { - let meta; - if (utils_1.guard.isDefined(data)) { - if (!utils_1.guard.isObjectRecord(data)) { - throw new TypeError('meta field is malformed'); - } - else { - meta = Object.entries(data).reduce((acc, [path, metadata]) => ({ - ...acc, - [path]: file_1.MetaFile.fromJSON(metadata), - }), {}); - } - } - return meta; -} diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/targets.js b/node_modules/pacote/node_modules/@tufjs/models/dist/targets.js deleted file mode 100644 index 54bd8f8c554af..0000000000000 --- a/node_modules/pacote/node_modules/@tufjs/models/dist/targets.js +++ /dev/null @@ -1,92 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Targets = void 0; -const util_1 = __importDefault(require("util")); -const base_1 = require("./base"); -const delegations_1 = require("./delegations"); -const file_1 = require("./file"); -const utils_1 = require("./utils"); -// Container for the signed part of targets metadata. -// -// Targets contains verifying information about target files and also delegates -// responsible to other Targets roles. -class Targets extends base_1.Signed { - constructor(options) { - super(options); - this.type = base_1.MetadataKind.Targets; - this.targets = options.targets || {}; - this.delegations = options.delegations; - } - addTarget(target) { - this.targets[target.path] = target; - } - equals(other) { - if (!(other instanceof Targets)) { - return false; - } - return (super.equals(other) && - util_1.default.isDeepStrictEqual(this.targets, other.targets) && - util_1.default.isDeepStrictEqual(this.delegations, other.delegations)); - } - toJSON() { - const json = { - _type: this.type, - spec_version: this.specVersion, - version: this.version, - expires: this.expires, - targets: targetsToJSON(this.targets), - ...this.unrecognizedFields, - }; - if (this.delegations) { - json.delegations = this.delegations.toJSON(); - } - return json; - } - static fromJSON(data) { - const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data); - const { targets, delegations, ...rest } = unrecognizedFields; - return new Targets({ - ...commonFields, - targets: targetsFromJSON(targets), - delegations: delegationsFromJSON(delegations), - unrecognizedFields: rest, - }); - } -} -exports.Targets = Targets; -function targetsToJSON(targets) { - return Object.entries(targets).reduce((acc, [path, target]) => ({ - ...acc, - [path]: target.toJSON(), - }), {}); -} -function targetsFromJSON(data) { - let targets; - if (utils_1.guard.isDefined(data)) { - if (!utils_1.guard.isObjectRecord(data)) { - throw new TypeError('targets must be an object'); - } - else { - targets = Object.entries(data).reduce((acc, [path, target]) => ({ - ...acc, - [path]: file_1.TargetFile.fromJSON(path, target), - }), {}); - } - } - return targets; -} -function delegationsFromJSON(data) { - let delegations; - if (utils_1.guard.isDefined(data)) { - if (!utils_1.guard.isObject(data)) { - throw new TypeError('delegations must be an object'); - } - else { - delegations = delegations_1.Delegations.fromJSON(data); - } - } - return delegations; -} diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/timestamp.js b/node_modules/pacote/node_modules/@tufjs/models/dist/timestamp.js deleted file mode 100644 index 9880c4c9fc254..0000000000000 --- a/node_modules/pacote/node_modules/@tufjs/models/dist/timestamp.js +++ /dev/null @@ -1,58 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Timestamp = void 0; -const base_1 = require("./base"); -const file_1 = require("./file"); -const utils_1 = require("./utils"); -/** - * A container for the signed part of timestamp metadata. - * - * A top-level that specifies the latest version of the snapshot role metadata file, - * and hence the latest versions of all metadata and targets on the repository. 
- */ -class Timestamp extends base_1.Signed { - constructor(options) { - super(options); - this.type = base_1.MetadataKind.Timestamp; - this.snapshotMeta = options.snapshotMeta || new file_1.MetaFile({ version: 1 }); - } - equals(other) { - if (!(other instanceof Timestamp)) { - return false; - } - return super.equals(other) && this.snapshotMeta.equals(other.snapshotMeta); - } - toJSON() { - return { - _type: this.type, - spec_version: this.specVersion, - version: this.version, - expires: this.expires, - meta: { 'snapshot.json': this.snapshotMeta.toJSON() }, - ...this.unrecognizedFields, - }; - } - static fromJSON(data) { - const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data); - const { meta, ...rest } = unrecognizedFields; - return new Timestamp({ - ...commonFields, - snapshotMeta: snapshotMetaFromJSON(meta), - unrecognizedFields: rest, - }); - } -} -exports.Timestamp = Timestamp; -function snapshotMetaFromJSON(data) { - let snapshotMeta; - if (utils_1.guard.isDefined(data)) { - const snapshotData = data['snapshot.json']; - if (!utils_1.guard.isDefined(snapshotData) || !utils_1.guard.isObject(snapshotData)) { - throw new TypeError('missing snapshot.json in meta'); - } - else { - snapshotMeta = file_1.MetaFile.fromJSON(snapshotData); - } - } - return snapshotMeta; -} diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/utils/guard.js b/node_modules/pacote/node_modules/@tufjs/models/dist/utils/guard.js deleted file mode 100644 index 911e8475986bb..0000000000000 --- a/node_modules/pacote/node_modules/@tufjs/models/dist/utils/guard.js +++ /dev/null @@ -1,32 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.isDefined = isDefined; -exports.isObject = isObject; -exports.isStringArray = isStringArray; -exports.isObjectArray = isObjectArray; -exports.isStringRecord = isStringRecord; -exports.isObjectRecord = isObjectRecord; -function isDefined(val) { - return val !== undefined; -} -function isObject(value) { - return typeof value === 'object' && value !== null; -} -function isStringArray(value) { - return Array.isArray(value) && value.every((v) => typeof v === 'string'); -} -function isObjectArray(value) { - return Array.isArray(value) && value.every(isObject); -} -function isStringRecord(value) { - return (typeof value === 'object' && - value !== null && - Object.keys(value).every((k) => typeof k === 'string') && - Object.values(value).every((v) => typeof v === 'string')); -} -function isObjectRecord(value) { - return (typeof value === 'object' && - value !== null && - Object.keys(value).every((k) => typeof k === 'string') && - Object.values(value).every((v) => typeof v === 'object' && v !== null)); -} diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/utils/index.js b/node_modules/pacote/node_modules/@tufjs/models/dist/utils/index.js deleted file mode 100644 index 872aae28049c9..0000000000000 --- a/node_modules/pacote/node_modules/@tufjs/models/dist/utils/index.js +++ /dev/null @@ -1,28 +0,0 @@ -"use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.crypto = exports.guard = void 0; -exports.guard = __importStar(require("./guard")); -exports.crypto = __importStar(require("./verify")); diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/utils/key.js b/node_modules/pacote/node_modules/@tufjs/models/dist/utils/key.js deleted file mode 100644 index 3c3ec07f1425a..0000000000000 --- a/node_modules/pacote/node_modules/@tufjs/models/dist/utils/key.js +++ /dev/null @@ -1,142 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.getPublicKey = getPublicKey; -const crypto_1 = __importDefault(require("crypto")); -const error_1 = require("../error"); -const oid_1 = require("./oid"); -const ASN1_TAG_SEQUENCE = 0x30; -const ANS1_TAG_BIT_STRING = 0x03; -const NULL_BYTE = 0x00; -const OID_EDDSA = '1.3.101.112'; -const OID_EC_PUBLIC_KEY = '1.2.840.10045.2.1'; -const OID_EC_CURVE_P256V1 = '1.2.840.10045.3.1.7'; -const PEM_HEADER = '-----BEGIN PUBLIC KEY-----'; -function getPublicKey(keyInfo) { - switch (keyInfo.keyType) { - case 'rsa': - return getRSAPublicKey(keyInfo); - case 'ed25519': - return getED25519PublicKey(keyInfo); - case 'ecdsa': - case 'ecdsa-sha2-nistp256': - case 'ecdsa-sha2-nistp384': - return getECDCSAPublicKey(keyInfo); - default: - throw new error_1.UnsupportedAlgorithmError(`Unsupported key type: ${keyInfo.keyType}`); - } -} -function getRSAPublicKey(keyInfo) { - // Only support PEM-encoded RSA keys - if (!keyInfo.keyVal.startsWith(PEM_HEADER)) { - throw new error_1.CryptoError('Invalid key format'); - } - const key = crypto_1.default.createPublicKey(keyInfo.keyVal); - switch (keyInfo.scheme) { - case 'rsassa-pss-sha256': - return { - key: key, - padding: crypto_1.default.constants.RSA_PKCS1_PSS_PADDING, - }; - default: - throw new error_1.UnsupportedAlgorithmError(`Unsupported RSA scheme: ${keyInfo.scheme}`); - } -} -function getED25519PublicKey(keyInfo) { - let key; - // If key is already PEM-encoded we can just parse it - if (keyInfo.keyVal.startsWith(PEM_HEADER)) { - key = crypto_1.default.createPublicKey(keyInfo.keyVal); - } - else { - // If key is not PEM-encoded it had better be hex - if (!isHex(keyInfo.keyVal)) { - throw new error_1.CryptoError('Invalid key format'); - } - key = crypto_1.default.createPublicKey({ - key: ed25519.hexToDER(keyInfo.keyVal), - format: 'der', - type: 'spki', - }); - } - return { key }; -} -function getECDCSAPublicKey(keyInfo) { - let key; - // If key is already PEM-encoded we can just parse it - if (keyInfo.keyVal.startsWith(PEM_HEADER)) { - key = 
crypto_1.default.createPublicKey(keyInfo.keyVal); - } - else { - // If key is not PEM-encoded it had better be hex - if (!isHex(keyInfo.keyVal)) { - throw new error_1.CryptoError('Invalid key format'); - } - key = crypto_1.default.createPublicKey({ - key: ecdsa.hexToDER(keyInfo.keyVal), - format: 'der', - type: 'spki', - }); - } - return { key }; -} -const ed25519 = { - // Translates a hex key into a crypto KeyObject - // https://keygen.sh/blog/how-to-use-hexadecimal-ed25519-keys-in-node/ - hexToDER: (hex) => { - const key = Buffer.from(hex, 'hex'); - const oid = (0, oid_1.encodeOIDString)(OID_EDDSA); - // Create a byte sequence containing the OID and key - const elements = Buffer.concat([ - Buffer.concat([ - Buffer.from([ASN1_TAG_SEQUENCE]), - Buffer.from([oid.length]), - oid, - ]), - Buffer.concat([ - Buffer.from([ANS1_TAG_BIT_STRING]), - Buffer.from([key.length + 1]), - Buffer.from([NULL_BYTE]), - key, - ]), - ]); - // Wrap up by creating a sequence of elements - const der = Buffer.concat([ - Buffer.from([ASN1_TAG_SEQUENCE]), - Buffer.from([elements.length]), - elements, - ]); - return der; - }, -}; -const ecdsa = { - hexToDER: (hex) => { - const key = Buffer.from(hex, 'hex'); - const bitString = Buffer.concat([ - Buffer.from([ANS1_TAG_BIT_STRING]), - Buffer.from([key.length + 1]), - Buffer.from([NULL_BYTE]), - key, - ]); - const oids = Buffer.concat([ - (0, oid_1.encodeOIDString)(OID_EC_PUBLIC_KEY), - (0, oid_1.encodeOIDString)(OID_EC_CURVE_P256V1), - ]); - const oidSequence = Buffer.concat([ - Buffer.from([ASN1_TAG_SEQUENCE]), - Buffer.from([oids.length]), - oids, - ]); - // Wrap up by creating a sequence of elements - const der = Buffer.concat([ - Buffer.from([ASN1_TAG_SEQUENCE]), - Buffer.from([oidSequence.length + bitString.length]), - oidSequence, - bitString, - ]); - return der; - }, -}; -const isHex = (key) => /^[0-9a-fA-F]+$/.test(key); diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/utils/oid.js b/node_modules/pacote/node_modules/@tufjs/models/dist/utils/oid.js deleted file mode 100644 index 00b29c3030d1e..0000000000000 --- a/node_modules/pacote/node_modules/@tufjs/models/dist/utils/oid.js +++ /dev/null @@ -1,26 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.encodeOIDString = encodeOIDString; -const ANS1_TAG_OID = 0x06; -function encodeOIDString(oid) { - const parts = oid.split('.'); - // The first two subidentifiers are encoded into the first byte - const first = parseInt(parts[0], 10) * 40 + parseInt(parts[1], 10); - const rest = []; - parts.slice(2).forEach((part) => { - const bytes = encodeVariableLengthInteger(parseInt(part, 10)); - rest.push(...bytes); - }); - const der = Buffer.from([first, ...rest]); - return Buffer.from([ANS1_TAG_OID, der.length, ...der]); -} -function encodeVariableLengthInteger(value) { - const bytes = []; - let mask = 0x00; - while (value > 0) { - bytes.unshift((value & 0x7f) | mask); - value >>= 7; - mask = 0x80; - } - return bytes; -} diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/utils/types.js b/node_modules/pacote/node_modules/@tufjs/models/dist/utils/types.js deleted file mode 100644 index c8ad2e549bdc6..0000000000000 --- a/node_modules/pacote/node_modules/@tufjs/models/dist/utils/types.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/utils/verify.js b/node_modules/pacote/node_modules/@tufjs/models/dist/utils/verify.js deleted file 
mode 100644 index 8232b6f6a97ab..0000000000000 --- a/node_modules/pacote/node_modules/@tufjs/models/dist/utils/verify.js +++ /dev/null @@ -1,13 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.verifySignature = void 0; -const canonical_json_1 = require("@tufjs/canonical-json"); -const crypto_1 = __importDefault(require("crypto")); -const verifySignature = (metaDataSignedData, key, signature) => { - const canonicalData = Buffer.from((0, canonical_json_1.canonicalize)(metaDataSignedData)); - return crypto_1.default.verify(undefined, canonicalData, key, Buffer.from(signature, 'hex')); -}; -exports.verifySignature = verifySignature; diff --git a/node_modules/pacote/node_modules/@tufjs/models/package.json b/node_modules/pacote/node_modules/@tufjs/models/package.json deleted file mode 100644 index 8e5132ddf1079..0000000000000 --- a/node_modules/pacote/node_modules/@tufjs/models/package.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "name": "@tufjs/models", - "version": "3.0.1", - "description": "TUF metadata models", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "files": [ - "dist" - ], - "scripts": { - "build": "tsc --build", - "clean": "rm -rf dist && rm tsconfig.tsbuildinfo", - "test": "jest" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/theupdateframework/tuf-js.git" - }, - "keywords": [ - "tuf", - "security", - "update" - ], - "author": "bdehamer@github.com", - "license": "MIT", - "bugs": { - "url": "https://github.com/theupdateframework/tuf-js/issues" - }, - "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/models#readme", - "dependencies": { - "@tufjs/canonical-json": "2.0.0", - "minimatch": "^9.0.5" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } -} diff --git a/node_modules/pacote/node_modules/sigstore/LICENSE b/node_modules/pacote/node_modules/sigstore/LICENSE deleted file mode 100644 index e9e7c1679a09d..0000000000000 --- a/node_modules/pacote/node_modules/sigstore/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. 
- - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2023 The Sigstore Authors - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/node_modules/pacote/node_modules/sigstore/dist/config.js b/node_modules/pacote/node_modules/sigstore/dist/config.js deleted file mode 100644 index e8b2392f97f23..0000000000000 --- a/node_modules/pacote/node_modules/sigstore/dist/config.js +++ /dev/null @@ -1,120 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.DEFAULT_TIMEOUT = exports.DEFAULT_RETRY = void 0; -exports.createBundleBuilder = createBundleBuilder; -exports.createKeyFinder = createKeyFinder; -exports.createVerificationPolicy = createVerificationPolicy; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const core_1 = require("@sigstore/core"); -const sign_1 = require("@sigstore/sign"); -const verify_1 = require("@sigstore/verify"); -exports.DEFAULT_RETRY = { retries: 2 }; -exports.DEFAULT_TIMEOUT = 5000; -function createBundleBuilder(bundleType, options) { - const bundlerOptions = { - signer: initSigner(options), - witnesses: initWitnesses(options), - }; - switch (bundleType) { - case 'messageSignature': - return new sign_1.MessageSignatureBundleBuilder(bundlerOptions); - case 'dsseEnvelope': - return new sign_1.DSSEBundleBuilder({ - ...bundlerOptions, - certificateChain: options.legacyCompatibility, - }); - } -} -// Translates the public KeySelector type into the KeyFinderFunc type needed by -// the verifier. -function createKeyFinder(keySelector) { - return (hint) => { - const key = keySelector(hint); - if (!key) { - throw new verify_1.VerificationError({ - code: 'PUBLIC_KEY_ERROR', - message: `key not found: ${hint}`, - }); - } - return { - publicKey: core_1.crypto.createPublicKey(key), - validFor: () => true, - }; - }; -} -function createVerificationPolicy(options) { - const policy = {}; - const san = options.certificateIdentityEmail || options.certificateIdentityURI; - if (san) { - policy.subjectAlternativeName = san; - } - if (options.certificateIssuer) { - policy.extensions = { issuer: options.certificateIssuer }; - } - return policy; -} -// Instantiate the FulcioSigner based on the supplied options. -function initSigner(options) { - return new sign_1.FulcioSigner({ - fulcioBaseURL: options.fulcioURL, - identityProvider: options.identityProvider || initIdentityProvider(options), - retry: options.retry ?? exports.DEFAULT_RETRY, - timeout: options.timeout ?? exports.DEFAULT_TIMEOUT, - }); -} -// Instantiate an identity provider based on the supplied options. If an -// explicit identity token is provided, use that. Otherwise, use the CI -// context provider. -function initIdentityProvider(options) { - const token = options.identityToken; - if (token) { - /* istanbul ignore next */ - return { getToken: () => Promise.resolve(token) }; - } - else { - return new sign_1.CIContextProvider('sigstore'); - } -} -// Instantiate a collection of witnesses based on the supplied options. -function initWitnesses(options) { - const witnesses = []; - if (isRekorEnabled(options)) { - witnesses.push(new sign_1.RekorWitness({ - rekorBaseURL: options.rekorURL, - entryType: options.legacyCompatibility ? 'intoto' : 'dsse', - fetchOnConflict: false, - retry: options.retry ?? exports.DEFAULT_RETRY, - timeout: options.timeout ?? exports.DEFAULT_TIMEOUT, - })); - } - if (isTSAEnabled(options)) { - witnesses.push(new sign_1.TSAWitness({ - tsaBaseURL: options.tsaServerURL, - retry: options.retry ?? exports.DEFAULT_RETRY, - timeout: options.timeout ?? 
exports.DEFAULT_TIMEOUT, - })); - } - return witnesses; -} -// Type assertion to ensure that Rekor is enabled -function isRekorEnabled(options) { - return options.tlogUpload !== false; -} -// Type assertion to ensure that TSA is enabled -function isTSAEnabled(options) { - return options.tsaServerURL !== undefined; -} diff --git a/node_modules/pacote/node_modules/sigstore/dist/index.js b/node_modules/pacote/node_modules/sigstore/dist/index.js deleted file mode 100644 index 7f6a5cf86bbfc..0000000000000 --- a/node_modules/pacote/node_modules/sigstore/dist/index.js +++ /dev/null @@ -1,34 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.verify = exports.sign = exports.createVerifier = exports.attest = exports.VerificationError = exports.PolicyError = exports.TUFError = exports.InternalError = exports.DEFAULT_REKOR_URL = exports.DEFAULT_FULCIO_URL = exports.ValidationError = void 0; -/* -Copyright 2022 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -var bundle_1 = require("@sigstore/bundle"); -Object.defineProperty(exports, "ValidationError", { enumerable: true, get: function () { return bundle_1.ValidationError; } }); -var sign_1 = require("@sigstore/sign"); -Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return sign_1.DEFAULT_FULCIO_URL; } }); -Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return sign_1.DEFAULT_REKOR_URL; } }); -Object.defineProperty(exports, "InternalError", { enumerable: true, get: function () { return sign_1.InternalError; } }); -var tuf_1 = require("@sigstore/tuf"); -Object.defineProperty(exports, "TUFError", { enumerable: true, get: function () { return tuf_1.TUFError; } }); -var verify_1 = require("@sigstore/verify"); -Object.defineProperty(exports, "PolicyError", { enumerable: true, get: function () { return verify_1.PolicyError; } }); -Object.defineProperty(exports, "VerificationError", { enumerable: true, get: function () { return verify_1.VerificationError; } }); -var sigstore_1 = require("./sigstore"); -Object.defineProperty(exports, "attest", { enumerable: true, get: function () { return sigstore_1.attest; } }); -Object.defineProperty(exports, "createVerifier", { enumerable: true, get: function () { return sigstore_1.createVerifier; } }); -Object.defineProperty(exports, "sign", { enumerable: true, get: function () { return sigstore_1.sign; } }); -Object.defineProperty(exports, "verify", { enumerable: true, get: function () { return sigstore_1.verify; } }); diff --git a/node_modules/pacote/node_modules/sigstore/dist/sigstore.js b/node_modules/pacote/node_modules/sigstore/dist/sigstore.js deleted file mode 100644 index 2b37ef46b7438..0000000000000 --- a/node_modules/pacote/node_modules/sigstore/dist/sigstore.js +++ /dev/null @@ -1,102 +0,0 @@ -"use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.sign = sign; -exports.attest = attest; -exports.verify = verify; -exports.createVerifier = createVerifier; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const bundle_1 = require("@sigstore/bundle"); -const tuf = __importStar(require("@sigstore/tuf")); -const verify_1 = require("@sigstore/verify"); -const config = __importStar(require("./config")); -async function sign(payload, -/* istanbul ignore next */ -options = {}) { - const bundler = config.createBundleBuilder('messageSignature', options); - const bundle = await bundler.create({ data: payload }); - return (0, bundle_1.bundleToJSON)(bundle); -} -async function attest(payload, payloadType, -/* istanbul ignore next */ -options = {}) { - const bundler = config.createBundleBuilder('dsseEnvelope', options); - const bundle = await bundler.create({ data: payload, type: payloadType }); - return (0, bundle_1.bundleToJSON)(bundle); -} -async function verify(bundle, dataOrOptions, options) { - let data; - if (Buffer.isBuffer(dataOrOptions)) { - data = dataOrOptions; - } - else { - options = dataOrOptions; - } - return createVerifier(options).then((verifier) => verifier.verify(bundle, data)); -} -async function createVerifier( -/* istanbul ignore next */ -options = {}) { - const trustedRoot = await tuf.getTrustedRoot({ - mirrorURL: options.tufMirrorURL, - rootPath: options.tufRootPath, - cachePath: options.tufCachePath, - forceCache: options.tufForceCache, - retry: options.retry ?? config.DEFAULT_RETRY, - timeout: options.timeout ?? config.DEFAULT_TIMEOUT, - }); - const keyFinder = options.keySelector - ? 
config.createKeyFinder(options.keySelector) - : undefined; - const trustMaterial = (0, verify_1.toTrustMaterial)(trustedRoot, keyFinder); - const verifierOptions = { - ctlogThreshold: options.ctLogThreshold, - tlogThreshold: options.tlogThreshold, - }; - const verifier = new verify_1.Verifier(trustMaterial, verifierOptions); - const policy = config.createVerificationPolicy(options); - return { - verify: (bundle, payload) => { - const deserializedBundle = (0, bundle_1.bundleFromJSON)(bundle); - const signedEntity = (0, verify_1.toSignedEntity)(deserializedBundle, payload); - verifier.verify(signedEntity, policy); - return; - }, - }; -} diff --git a/node_modules/pacote/node_modules/sigstore/package.json b/node_modules/pacote/node_modules/sigstore/package.json deleted file mode 100644 index 0f798a263657b..0000000000000 --- a/node_modules/pacote/node_modules/sigstore/package.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "name": "sigstore", - "version": "3.0.0", - "description": "code-signing for npm packages", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "scripts": { - "clean": "shx rm -rf dist *.tsbuildinfo", - "build": "tsc --build", - "test": "jest" - }, - "files": [ - "dist", - "store" - ], - "author": "bdehamer@github.com", - "license": "Apache-2.0", - "repository": { - "type": "git", - "url": "git+https://github.com/sigstore/sigstore-js.git" - }, - "bugs": { - "url": "https://github.com/sigstore/sigstore-js/issues" - }, - "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/client#readme", - "publishConfig": { - "provenance": true - }, - "devDependencies": { - "@sigstore/rekor-types": "^3.0.0", - "@sigstore/jest": "^0.0.0", - "@sigstore/mock": "^0.8.0", - "@tufjs/repo-mock": "^3.0.1", - "@types/make-fetch-happen": "^10.0.4" - }, - "dependencies": { - "@sigstore/bundle": "^3.0.0", - "@sigstore/core": "^2.0.0", - "@sigstore/protobuf-specs": "^0.3.2", - "@sigstore/sign": "^3.0.0", - "@sigstore/tuf": "^3.0.0", - "@sigstore/verify": "^2.0.0" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } -} diff --git a/node_modules/pacote/node_modules/tuf-js/LICENSE b/node_modules/pacote/node_modules/tuf-js/LICENSE deleted file mode 100644 index 420700f5d3765..0000000000000 --- a/node_modules/pacote/node_modules/tuf-js/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022 GitHub and the TUF Contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/node_modules/pacote/node_modules/tuf-js/dist/config.js b/node_modules/pacote/node_modules/tuf-js/dist/config.js deleted file mode 100644 index c66d76af86b98..0000000000000 --- a/node_modules/pacote/node_modules/tuf-js/dist/config.js +++ /dev/null @@ -1,15 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.defaultConfig = void 0; -exports.defaultConfig = { - maxRootRotations: 256, - maxDelegations: 32, - rootMaxLength: 512000, //bytes - timestampMaxLength: 16384, // bytes - snapshotMaxLength: 2000000, // bytes - targetsMaxLength: 5000000, // bytes - prefixTargetsWithHash: true, - fetchTimeout: 100000, // milliseconds - fetchRetries: undefined, - fetchRetry: 2, -}; diff --git a/node_modules/pacote/node_modules/tuf-js/dist/error.js b/node_modules/pacote/node_modules/tuf-js/dist/error.js deleted file mode 100644 index f4b10fa202895..0000000000000 --- a/node_modules/pacote/node_modules/tuf-js/dist/error.js +++ /dev/null @@ -1,48 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.DownloadHTTPError = exports.DownloadLengthMismatchError = exports.DownloadError = exports.ExpiredMetadataError = exports.EqualVersionError = exports.BadVersionError = exports.RepositoryError = exports.PersistError = exports.RuntimeError = exports.ValueError = void 0; -// An error about insufficient values -class ValueError extends Error { -} -exports.ValueError = ValueError; -class RuntimeError extends Error { -} -exports.RuntimeError = RuntimeError; -class PersistError extends Error { -} -exports.PersistError = PersistError; -// An error with a repository's state, such as a missing file. -// It covers all exceptions that come from the repository side when -// looking from the perspective of users of metadata API or ngclient. -class RepositoryError extends Error { -} -exports.RepositoryError = RepositoryError; -// An error for metadata that contains an invalid version number. -class BadVersionError extends RepositoryError { -} -exports.BadVersionError = BadVersionError; -// An error for metadata containing a previously verified version number. -class EqualVersionError extends BadVersionError { -} -exports.EqualVersionError = EqualVersionError; -// Indicate that a TUF Metadata file has expired. -class ExpiredMetadataError extends RepositoryError { -} -exports.ExpiredMetadataError = ExpiredMetadataError; -//----- Download Errors ------------------------------------------------------- -// An error occurred while attempting to download a file. -class DownloadError extends Error { -} -exports.DownloadError = DownloadError; -// Indicate that a mismatch of lengths was seen while downloading a file -class DownloadLengthMismatchError extends DownloadError { -} -exports.DownloadLengthMismatchError = DownloadLengthMismatchError; -// Returned by FetcherInterface implementations for HTTP errors. -class DownloadHTTPError extends DownloadError { - constructor(message, statusCode) { - super(message); - this.statusCode = statusCode; - } -} -exports.DownloadHTTPError = DownloadHTTPError; diff --git a/node_modules/pacote/node_modules/tuf-js/dist/fetcher.js b/node_modules/pacote/node_modules/tuf-js/dist/fetcher.js deleted file mode 100644 index f966ce1bb0cdc..0000000000000 --- a/node_modules/pacote/node_modules/tuf-js/dist/fetcher.js +++ /dev/null @@ -1,84 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.DefaultFetcher = exports.BaseFetcher = void 0; -const debug_1 = __importDefault(require("debug")); -const fs_1 = __importDefault(require("fs")); -const make_fetch_happen_1 = __importDefault(require("make-fetch-happen")); -const util_1 = __importDefault(require("util")); -const error_1 = require("./error"); -const tmpfile_1 = require("./utils/tmpfile"); -const log = (0, debug_1.default)('tuf:fetch'); -class BaseFetcher { - // Download file from given URL. The file is downloaded to a temporary - // location and then passed to the given handler. The handler is responsible - // for moving the file to its final location. The temporary file is deleted - // after the handler returns. - async downloadFile(url, maxLength, handler) { - return (0, tmpfile_1.withTempFile)(async (tmpFile) => { - const reader = await this.fetch(url); - let numberOfBytesReceived = 0; - const fileStream = fs_1.default.createWriteStream(tmpFile); - // Read the stream a chunk at a time so that we can check - // the length of the file as we go - try { - for await (const chunk of reader) { - const bufferChunk = Buffer.from(chunk); - numberOfBytesReceived += bufferChunk.length; - if (numberOfBytesReceived > maxLength) { - throw new error_1.DownloadLengthMismatchError('Max length reached'); - } - await writeBufferToStream(fileStream, bufferChunk); - } - } - finally { - // Make sure we always close the stream - await util_1.default.promisify(fileStream.close).bind(fileStream)(); - } - return handler(tmpFile); - }); - } - // Download bytes from given URL. - async downloadBytes(url, maxLength) { - return this.downloadFile(url, maxLength, async (file) => { - const stream = fs_1.default.createReadStream(file); - const chunks = []; - for await (const chunk of stream) { - chunks.push(chunk); - } - return Buffer.concat(chunks); - }); - } -} -exports.BaseFetcher = BaseFetcher; -class DefaultFetcher extends BaseFetcher { - constructor(options = {}) { - super(); - this.timeout = options.timeout; - this.retry = options.retry; - } - async fetch(url) { - log('GET %s', url); - const response = await (0, make_fetch_happen_1.default)(url, { - timeout: this.timeout, - retry: this.retry, - }); - if (!response.ok || !response?.body) { - throw new error_1.DownloadHTTPError('Failed to download', response.status); - } - return response.body; - } -} -exports.DefaultFetcher = DefaultFetcher; -const writeBufferToStream = async (stream, buffer) => { - return new Promise((resolve, reject) => { - stream.write(buffer, (err) => { - if (err) { - reject(err); - } - resolve(true); - }); - }); -}; diff --git a/node_modules/pacote/node_modules/tuf-js/dist/index.js b/node_modules/pacote/node_modules/tuf-js/dist/index.js deleted file mode 100644 index 5a83b91f355d8..0000000000000 --- a/node_modules/pacote/node_modules/tuf-js/dist/index.js +++ /dev/null @@ -1,9 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Updater = exports.BaseFetcher = exports.TargetFile = void 0; -var models_1 = require("@tufjs/models"); -Object.defineProperty(exports, "TargetFile", { enumerable: true, get: function () { return models_1.TargetFile; } }); -var fetcher_1 = require("./fetcher"); -Object.defineProperty(exports, "BaseFetcher", { enumerable: true, get: function () { return fetcher_1.BaseFetcher; } }); -var updater_1 = require("./updater"); -Object.defineProperty(exports, "Updater", { enumerable: true, get: function () { return 
updater_1.Updater; } }); diff --git a/node_modules/pacote/node_modules/tuf-js/dist/store.js b/node_modules/pacote/node_modules/tuf-js/dist/store.js deleted file mode 100644 index 8567336108709..0000000000000 --- a/node_modules/pacote/node_modules/tuf-js/dist/store.js +++ /dev/null @@ -1,208 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.TrustedMetadataStore = void 0; -const models_1 = require("@tufjs/models"); -const error_1 = require("./error"); -class TrustedMetadataStore { - constructor(rootData) { - this.trustedSet = {}; - // Client workflow 5.1: record fixed update start time - this.referenceTime = new Date(); - // Client workflow 5.2: load trusted root metadata - this.loadTrustedRoot(rootData); - } - get root() { - if (!this.trustedSet.root) { - throw new ReferenceError('No trusted root metadata'); - } - return this.trustedSet.root; - } - get timestamp() { - return this.trustedSet.timestamp; - } - get snapshot() { - return this.trustedSet.snapshot; - } - get targets() { - return this.trustedSet.targets; - } - getRole(name) { - return this.trustedSet[name]; - } - updateRoot(bytesBuffer) { - const data = JSON.parse(bytesBuffer.toString('utf8')); - const newRoot = models_1.Metadata.fromJSON(models_1.MetadataKind.Root, data); - if (newRoot.signed.type != models_1.MetadataKind.Root) { - throw new error_1.RepositoryError(`Expected 'root', got ${newRoot.signed.type}`); - } - // Client workflow 5.4: check for arbitrary software attack - this.root.verifyDelegate(models_1.MetadataKind.Root, newRoot); - // Client workflow 5.5: check for rollback attack - if (newRoot.signed.version != this.root.signed.version + 1) { - throw new error_1.BadVersionError(`Expected version ${this.root.signed.version + 1}, got ${newRoot.signed.version}`); - } - // Check that new root is signed by self - newRoot.verifyDelegate(models_1.MetadataKind.Root, newRoot); - // Client workflow 5.7: set new root as trusted root - this.trustedSet.root = newRoot; - return newRoot; - } - updateTimestamp(bytesBuffer) { - if (this.snapshot) { - throw new error_1.RuntimeError('Cannot update timestamp after snapshot'); - } - if (this.root.signed.isExpired(this.referenceTime)) { - throw new error_1.ExpiredMetadataError('Final root.json is expired'); - } - const data = JSON.parse(bytesBuffer.toString('utf8')); - const newTimestamp = models_1.Metadata.fromJSON(models_1.MetadataKind.Timestamp, data); - if (newTimestamp.signed.type != models_1.MetadataKind.Timestamp) { - throw new error_1.RepositoryError(`Expected 'timestamp', got ${newTimestamp.signed.type}`); - } - // Client workflow 5.4.2: check for arbitrary software attack - this.root.verifyDelegate(models_1.MetadataKind.Timestamp, newTimestamp); - if (this.timestamp) { - // Prevent rolling back timestamp version - // Client workflow 5.4.3.1: check for rollback attack - if (newTimestamp.signed.version < this.timestamp.signed.version) { - throw new error_1.BadVersionError(`New timestamp version ${newTimestamp.signed.version} is less than current version ${this.timestamp.signed.version}`); - } - // Keep using old timestamp if versions are equal. 
- if (newTimestamp.signed.version === this.timestamp.signed.version) { - throw new error_1.EqualVersionError(`New timestamp version ${newTimestamp.signed.version} is equal to current version ${this.timestamp.signed.version}`); - } - // Prevent rolling back snapshot version - // Client workflow 5.4.3.2: check for rollback attack - const snapshotMeta = this.timestamp.signed.snapshotMeta; - const newSnapshotMeta = newTimestamp.signed.snapshotMeta; - if (newSnapshotMeta.version < snapshotMeta.version) { - throw new error_1.BadVersionError(`New snapshot version ${newSnapshotMeta.version} is less than current version ${snapshotMeta.version}`); - } - } - // expiry not checked to allow old timestamp to be used for rollback - // protection of new timestamp: expiry is checked in update_snapshot - this.trustedSet.timestamp = newTimestamp; - // Client workflow 5.4.4: check for freeze attack - this.checkFinalTimestamp(); - return newTimestamp; - } - updateSnapshot(bytesBuffer, trusted = false) { - if (!this.timestamp) { - throw new error_1.RuntimeError('Cannot update snapshot before timestamp'); - } - if (this.targets) { - throw new error_1.RuntimeError('Cannot update snapshot after targets'); - } - // Snapshot cannot be loaded if final timestamp is expired - this.checkFinalTimestamp(); - const snapshotMeta = this.timestamp.signed.snapshotMeta; - // Verify non-trusted data against the hashes in timestamp, if any. - // Trusted snapshot data has already been verified once. - // Client workflow 5.5.2: check against timestamp role's snaphsot hash - if (!trusted) { - snapshotMeta.verify(bytesBuffer); - } - const data = JSON.parse(bytesBuffer.toString('utf8')); - const newSnapshot = models_1.Metadata.fromJSON(models_1.MetadataKind.Snapshot, data); - if (newSnapshot.signed.type != models_1.MetadataKind.Snapshot) { - throw new error_1.RepositoryError(`Expected 'snapshot', got ${newSnapshot.signed.type}`); - } - // Client workflow 5.5.3: check for arbitrary software attack - this.root.verifyDelegate(models_1.MetadataKind.Snapshot, newSnapshot); - // version check against meta version (5.5.4) is deferred to allow old - // snapshot to be used in rollback protection - // Client workflow 5.5.5: check for rollback attack - if (this.snapshot) { - Object.entries(this.snapshot.signed.meta).forEach(([fileName, fileInfo]) => { - const newFileInfo = newSnapshot.signed.meta[fileName]; - if (!newFileInfo) { - throw new error_1.RepositoryError(`Missing file ${fileName} in new snapshot`); - } - if (newFileInfo.version < fileInfo.version) { - throw new error_1.BadVersionError(`New version ${newFileInfo.version} of ${fileName} is less than current version ${fileInfo.version}`); - } - }); - } - this.trustedSet.snapshot = newSnapshot; - // snapshot is loaded, but we raise if it's not valid _final_ snapshot - // Client workflow 5.5.4 & 5.5.6 - this.checkFinalSnapsnot(); - return newSnapshot; - } - updateDelegatedTargets(bytesBuffer, roleName, delegatorName) { - if (!this.snapshot) { - throw new error_1.RuntimeError('Cannot update delegated targets before snapshot'); - } - // Targets cannot be loaded if final snapshot is expired or its version - // does not match meta version in timestamp. 
- this.checkFinalSnapsnot(); - const delegator = this.trustedSet[delegatorName]; - if (!delegator) { - throw new error_1.RuntimeError(`No trusted ${delegatorName} metadata`); - } - // Extract metadata for the delegated role from snapshot - const meta = this.snapshot.signed.meta?.[`${roleName}.json`]; - if (!meta) { - throw new error_1.RepositoryError(`Missing ${roleName}.json in snapshot`); - } - // Client workflow 5.6.2: check against snapshot role's targets hash - meta.verify(bytesBuffer); - const data = JSON.parse(bytesBuffer.toString('utf8')); - const newDelegate = models_1.Metadata.fromJSON(models_1.MetadataKind.Targets, data); - if (newDelegate.signed.type != models_1.MetadataKind.Targets) { - throw new error_1.RepositoryError(`Expected 'targets', got ${newDelegate.signed.type}`); - } - // Client workflow 5.6.3: check for arbitrary software attack - delegator.verifyDelegate(roleName, newDelegate); - // Client workflow 5.6.4: Check against snapshot role’s targets version - const version = newDelegate.signed.version; - if (version != meta.version) { - throw new error_1.BadVersionError(`Version ${version} of ${roleName} does not match snapshot version ${meta.version}`); - } - // Client workflow 5.6.5: check for a freeze attack - if (newDelegate.signed.isExpired(this.referenceTime)) { - throw new error_1.ExpiredMetadataError(`${roleName}.json is expired`); - } - this.trustedSet[roleName] = newDelegate; - } - // Verifies and loads data as trusted root metadata. - // Note that an expired initial root is still considered valid. - loadTrustedRoot(bytesBuffer) { - const data = JSON.parse(bytesBuffer.toString('utf8')); - const root = models_1.Metadata.fromJSON(models_1.MetadataKind.Root, data); - if (root.signed.type != models_1.MetadataKind.Root) { - throw new error_1.RepositoryError(`Expected 'root', got ${root.signed.type}`); - } - root.verifyDelegate(models_1.MetadataKind.Root, root); - this.trustedSet['root'] = root; - } - checkFinalTimestamp() { - // Timestamp MUST be loaded - if (!this.timestamp) { - throw new ReferenceError('No trusted timestamp metadata'); - } - // Client workflow 5.4.4: check for freeze attack - if (this.timestamp.signed.isExpired(this.referenceTime)) { - throw new error_1.ExpiredMetadataError('Final timestamp.json is expired'); - } - } - checkFinalSnapsnot() { - // Snapshot and timestamp MUST be loaded - if (!this.snapshot) { - throw new ReferenceError('No trusted snapshot metadata'); - } - if (!this.timestamp) { - throw new ReferenceError('No trusted timestamp metadata'); - } - // Client workflow 5.5.6: check for freeze attack - if (this.snapshot.signed.isExpired(this.referenceTime)) { - throw new error_1.ExpiredMetadataError('snapshot.json is expired'); - } - // Client workflow 5.5.4: check against timestamp role’s snapshot version - const snapshotMeta = this.timestamp.signed.snapshotMeta; - if (this.snapshot.signed.version !== snapshotMeta.version) { - throw new error_1.BadVersionError("Snapshot version doesn't match timestamp"); - } - } -} -exports.TrustedMetadataStore = TrustedMetadataStore; diff --git a/node_modules/pacote/node_modules/tuf-js/dist/updater.js b/node_modules/pacote/node_modules/tuf-js/dist/updater.js deleted file mode 100644 index 8d5eb4428f044..0000000000000 --- a/node_modules/pacote/node_modules/tuf-js/dist/updater.js +++ /dev/null @@ -1,350 +0,0 @@ -"use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Updater = void 0; -const models_1 = require("@tufjs/models"); -const debug_1 = __importDefault(require("debug")); -const fs = __importStar(require("fs")); -const path = __importStar(require("path")); -const config_1 = require("./config"); -const error_1 = require("./error"); -const fetcher_1 = require("./fetcher"); -const store_1 = require("./store"); -const url = __importStar(require("./utils/url")); -const log = (0, debug_1.default)('tuf:cache'); -class Updater { - constructor(options) { - const { metadataDir, metadataBaseUrl, targetDir, targetBaseUrl, fetcher, config, } = options; - this.dir = metadataDir; - this.metadataBaseUrl = metadataBaseUrl; - this.targetDir = targetDir; - this.targetBaseUrl = targetBaseUrl; - this.forceCache = options.forceCache ?? false; - const data = this.loadLocalMetadata(models_1.MetadataKind.Root); - this.trustedSet = new store_1.TrustedMetadataStore(data); - this.config = { ...config_1.defaultConfig, ...config }; - this.fetcher = - fetcher || - new fetcher_1.DefaultFetcher({ - timeout: this.config.fetchTimeout, - retry: this.config.fetchRetries ?? this.config.fetchRetry, - }); - } - // refresh and load the metadata before downloading the target - // refresh should be called once after the client is initialized - async refresh() { - // If forceCache is true, try to load the timestamp from local storage - // without fetching it from the remote. Otherwise, load the root and - // timestamp from the remote per the TUF spec. - if (this.forceCache) { - // If anything fails, load the root and timestamp from the remote. This - // should cover any situation where the local metadata is corrupted or - // expired. - try { - await this.loadTimestamp({ checkRemote: false }); - } - catch (error) { - await this.loadRoot(); - await this.loadTimestamp(); - } - } - else { - await this.loadRoot(); - await this.loadTimestamp(); - } - await this.loadSnapshot(); - await this.loadTargets(models_1.MetadataKind.Targets, models_1.MetadataKind.Root); - } - // Returns the TargetFile instance with information for the given target path. - // - // Implicitly calls refresh if it hasn't already been called. 
- async getTargetInfo(targetPath) { - if (!this.trustedSet.targets) { - await this.refresh(); - } - return this.preorderDepthFirstWalk(targetPath); - } - async downloadTarget(targetInfo, filePath, targetBaseUrl) { - const targetPath = filePath || this.generateTargetPath(targetInfo); - if (!targetBaseUrl) { - if (!this.targetBaseUrl) { - throw new error_1.ValueError('Target base URL not set'); - } - targetBaseUrl = this.targetBaseUrl; - } - let targetFilePath = targetInfo.path; - const consistentSnapshot = this.trustedSet.root.signed.consistentSnapshot; - if (consistentSnapshot && this.config.prefixTargetsWithHash) { - const hashes = Object.values(targetInfo.hashes); - const { dir, base } = path.parse(targetFilePath); - const filename = `${hashes[0]}.${base}`; - targetFilePath = dir ? `${dir}/${filename}` : filename; - } - const targetUrl = url.join(targetBaseUrl, targetFilePath); - // Client workflow 5.7.3: download target file - await this.fetcher.downloadFile(targetUrl, targetInfo.length, async (fileName) => { - // Verify hashes and length of downloaded file - await targetInfo.verify(fs.createReadStream(fileName)); - // Copy file to target path - log('WRITE %s', targetPath); - fs.copyFileSync(fileName, targetPath); - }); - return targetPath; - } - async findCachedTarget(targetInfo, filePath) { - if (!filePath) { - filePath = this.generateTargetPath(targetInfo); - } - try { - if (fs.existsSync(filePath)) { - await targetInfo.verify(fs.createReadStream(filePath)); - return filePath; - } - } - catch (error) { - return; // File not found - } - return; // File not found - } - loadLocalMetadata(fileName) { - const filePath = path.join(this.dir, `${fileName}.json`); - log('READ %s', filePath); - return fs.readFileSync(filePath); - } - // Sequentially load and persist on local disk every newer root metadata - // version available on the remote. - // Client workflow 5.3: update root role - async loadRoot() { - // Client workflow 5.3.2: version of trusted root metadata file - const rootVersion = this.trustedSet.root.signed.version; - const lowerBound = rootVersion + 1; - const upperBound = lowerBound + this.config.maxRootRotations; - for (let version = lowerBound; version < upperBound; version++) { - const rootUrl = url.join(this.metadataBaseUrl, `${version}.root.json`); - try { - // Client workflow 5.3.3: download new root metadata file - const bytesData = await this.fetcher.downloadBytes(rootUrl, this.config.rootMaxLength); - // Client workflow 5.3.4 - 5.4.7 - this.trustedSet.updateRoot(bytesData); - // Client workflow 5.3.8: persist root metadata file - this.persistMetadata(models_1.MetadataKind.Root, bytesData); - } - catch (error) { - if (error instanceof error_1.DownloadHTTPError) { - // 404/403 means current root is newest available - if ([403, 404].includes(error.statusCode)) { - break; - } - } - throw error; - } - } - } - // Load local and remote timestamp metadata. - // Client workflow 5.4: update timestamp role - async loadTimestamp({ checkRemote } = { checkRemote: true }) { - // Load local and remote timestamp metadata - try { - const data = this.loadLocalMetadata(models_1.MetadataKind.Timestamp); - this.trustedSet.updateTimestamp(data); - // If checkRemote is disabled, return here to avoid fetching the remote - // timestamp metadata. 
- if (!checkRemote) { - return; - } - } - catch (error) { - // continue - } - //Load from remote (whether local load succeeded or not) - const timestampUrl = url.join(this.metadataBaseUrl, 'timestamp.json'); - // Client workflow 5.4.1: download timestamp metadata file - const bytesData = await this.fetcher.downloadBytes(timestampUrl, this.config.timestampMaxLength); - try { - // Client workflow 5.4.2 - 5.4.4 - this.trustedSet.updateTimestamp(bytesData); - } - catch (error) { - // If new timestamp version is same as current, discardd the new one. - // This is normal and should NOT raise an error. - if (error instanceof error_1.EqualVersionError) { - return; - } - // Re-raise any other error - throw error; - } - // Client workflow 5.4.5: persist timestamp metadata - this.persistMetadata(models_1.MetadataKind.Timestamp, bytesData); - } - // Load local and remote snapshot metadata. - // Client workflow 5.5: update snapshot role - async loadSnapshot() { - //Load local (and if needed remote) snapshot metadata - try { - const data = this.loadLocalMetadata(models_1.MetadataKind.Snapshot); - this.trustedSet.updateSnapshot(data, true); - } - catch (error) { - if (!this.trustedSet.timestamp) { - throw new ReferenceError('No timestamp metadata'); - } - const snapshotMeta = this.trustedSet.timestamp.signed.snapshotMeta; - const maxLength = snapshotMeta.length || this.config.snapshotMaxLength; - const version = this.trustedSet.root.signed.consistentSnapshot - ? snapshotMeta.version - : undefined; - const snapshotUrl = url.join(this.metadataBaseUrl, version ? `${version}.snapshot.json` : 'snapshot.json'); - try { - // Client workflow 5.5.1: download snapshot metadata file - const bytesData = await this.fetcher.downloadBytes(snapshotUrl, maxLength); - // Client workflow 5.5.2 - 5.5.6 - this.trustedSet.updateSnapshot(bytesData); - // Client workflow 5.5.7: persist snapshot metadata file - this.persistMetadata(models_1.MetadataKind.Snapshot, bytesData); - } - catch (error) { - throw new error_1.RuntimeError(`Unable to load snapshot metadata error ${error}`); - } - } - } - // Load local and remote targets metadata. - // Client workflow 5.6: update targets role - async loadTargets(role, parentRole) { - if (this.trustedSet.getRole(role)) { - return this.trustedSet.getRole(role); - } - try { - const buffer = this.loadLocalMetadata(role); - this.trustedSet.updateDelegatedTargets(buffer, role, parentRole); - } - catch (error) { - // Local 'role' does not exist or is invalid: update from remote - if (!this.trustedSet.snapshot) { - throw new ReferenceError('No snapshot metadata'); - } - const metaInfo = this.trustedSet.snapshot.signed.meta[`${role}.json`]; - // TODO: use length for fetching - const maxLength = metaInfo.length || this.config.targetsMaxLength; - const version = this.trustedSet.root.signed.consistentSnapshot - ? metaInfo.version - : undefined; - const encodedRole = encodeURIComponent(role); - const metadataUrl = url.join(this.metadataBaseUrl, version ? 
`${version}.${encodedRole}.json` : `${encodedRole}.json`); - try { - // Client workflow 5.6.1: download targets metadata file - const bytesData = await this.fetcher.downloadBytes(metadataUrl, maxLength); - // Client workflow 5.6.2 - 5.6.6 - this.trustedSet.updateDelegatedTargets(bytesData, role, parentRole); - // Client workflow 5.6.7: persist targets metadata file - this.persistMetadata(role, bytesData); - } - catch (error) { - throw new error_1.RuntimeError(`Unable to load targets error ${error}`); - } - } - return this.trustedSet.getRole(role); - } - async preorderDepthFirstWalk(targetPath) { - // Interrogates the tree of target delegations in order of appearance - // (which implicitly order trustworthiness), and returns the matching - // target found in the most trusted role. - // List of delegations to be interrogated. A (role, parent role) pair - // is needed to load and verify the delegated targets metadata. - const delegationsToVisit = [ - { - roleName: models_1.MetadataKind.Targets, - parentRoleName: models_1.MetadataKind.Root, - }, - ]; - const visitedRoleNames = new Set(); - // Client workflow 5.6.7: preorder depth-first traversal of the graph of - // target delegations - while (visitedRoleNames.size <= this.config.maxDelegations && - delegationsToVisit.length > 0) { - // Pop the role name from the top of the stack. - const { roleName, parentRoleName } = delegationsToVisit.pop(); - // Skip any visited current role to prevent cycles. - // Client workflow 5.6.7.1: skip already-visited roles - if (visitedRoleNames.has(roleName)) { - continue; - } - // The metadata for 'role_name' must be downloaded/updated before - // its targets, delegations, and child roles can be inspected. - const targets = (await this.loadTargets(roleName, parentRoleName)) - ?.signed; - if (!targets) { - continue; - } - const target = targets.targets?.[targetPath]; - if (target) { - return target; - } - // After preorder check, add current role to set of visited roles. - visitedRoleNames.add(roleName); - if (targets.delegations) { - const childRolesToVisit = []; - // NOTE: This may be a slow operation if there are many delegated roles. 
- const rolesForTarget = targets.delegations.rolesForTarget(targetPath); - for (const { role: childName, terminating } of rolesForTarget) { - childRolesToVisit.push({ - roleName: childName, - parentRoleName: roleName, - }); - // Client workflow 5.6.7.2.1 - if (terminating) { - delegationsToVisit.splice(0); // empty the array - break; - } - } - childRolesToVisit.reverse(); - delegationsToVisit.push(...childRolesToVisit); - } - } - return; // no matching target found - } - generateTargetPath(targetInfo) { - if (!this.targetDir) { - throw new error_1.ValueError('Target directory not set'); - } - // URL encode target path - const filePath = encodeURIComponent(targetInfo.path); - return path.join(this.targetDir, filePath); - } - persistMetadata(metaDataName, bytesData) { - const encodedName = encodeURIComponent(metaDataName); - try { - const filePath = path.join(this.dir, `${encodedName}.json`); - log('WRITE %s', filePath); - fs.writeFileSync(filePath, bytesData.toString('utf8')); - } - catch (error) { - throw new error_1.PersistError(`Failed to persist metadata ${encodedName} error: ${error}`); - } - } -} -exports.Updater = Updater; diff --git a/node_modules/pacote/node_modules/tuf-js/dist/utils/tmpfile.js b/node_modules/pacote/node_modules/tuf-js/dist/utils/tmpfile.js deleted file mode 100644 index 923eef6044bcc..0000000000000 --- a/node_modules/pacote/node_modules/tuf-js/dist/utils/tmpfile.js +++ /dev/null @@ -1,25 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.withTempFile = void 0; -const promises_1 = __importDefault(require("fs/promises")); -const os_1 = __importDefault(require("os")); -const path_1 = __importDefault(require("path")); -// Invokes the given handler with the path to a temporary file. The file -// is deleted after the handler returns. -const withTempFile = async (handler) => withTempDir(async (dir) => handler(path_1.default.join(dir, 'tempfile'))); -exports.withTempFile = withTempFile; -// Invokes the given handler with a temporary directory. The directory is -// deleted after the handler returns. -const withTempDir = async (handler) => { - const tmpDir = await promises_1.default.realpath(os_1.default.tmpdir()); - const dir = await promises_1.default.mkdtemp(tmpDir + path_1.default.sep); - try { - return await handler(dir); - } - finally { - await promises_1.default.rm(dir, { force: true, recursive: true, maxRetries: 3 }); - } -}; diff --git a/node_modules/pacote/node_modules/tuf-js/dist/utils/url.js b/node_modules/pacote/node_modules/tuf-js/dist/utils/url.js deleted file mode 100644 index 359d1f3ef385b..0000000000000 --- a/node_modules/pacote/node_modules/tuf-js/dist/utils/url.js +++ /dev/null @@ -1,13 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.join = join; -const url_1 = require("url"); -function join(base, path) { - return new url_1.URL(ensureTrailingSlash(base) + removeLeadingSlash(path)).toString(); -} -function ensureTrailingSlash(path) { - return path.endsWith('/') ? path : path + '/'; -} -function removeLeadingSlash(path) { - return path.startsWith('/') ? 
path.slice(1) : path; -} diff --git a/node_modules/pacote/node_modules/tuf-js/package.json b/node_modules/pacote/node_modules/tuf-js/package.json deleted file mode 100644 index e79a3d45f3f06..0000000000000 --- a/node_modules/pacote/node_modules/tuf-js/package.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "name": "tuf-js", - "version": "3.0.1", - "description": "JavaScript implementation of The Update Framework (TUF)", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "scripts": { - "build": "tsc --build", - "clean": "rm -rf dist && rm tsconfig.tsbuildinfo", - "test": "jest" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/theupdateframework/tuf-js.git" - }, - "files": [ - "dist" - ], - "keywords": [ - "tuf", - "security", - "update" - ], - "author": "bdehamer@github.com", - "license": "MIT", - "bugs": { - "url": "https://github.com/theupdateframework/tuf-js/issues" - }, - "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/client#readme", - "devDependencies": { - "@tufjs/repo-mock": "3.0.1", - "@types/debug": "^4.1.12", - "@types/make-fetch-happen": "^10.0.4" - }, - "dependencies": { - "@tufjs/models": "3.0.1", - "debug": "^4.3.6", - "make-fetch-happen": "^14.0.1" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } -} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/LICENSE b/node_modules/sigstore/LICENSE similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/LICENSE rename to node_modules/sigstore/LICENSE diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/sigstore/dist/config.js b/node_modules/sigstore/dist/config.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/sigstore/dist/config.js rename to node_modules/sigstore/dist/config.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/sigstore/dist/index.js b/node_modules/sigstore/dist/index.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/sigstore/dist/index.js rename to node_modules/sigstore/dist/index.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/sigstore/dist/sigstore.js b/node_modules/sigstore/dist/sigstore.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/sigstore/dist/sigstore.js rename to node_modules/sigstore/dist/sigstore.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/LICENSE b/node_modules/sigstore/node_modules/@sigstore/bundle/LICENSE similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/LICENSE rename to node_modules/sigstore/node_modules/@sigstore/bundle/LICENSE diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/build.js b/node_modules/sigstore/node_modules/@sigstore/bundle/dist/build.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/build.js rename to node_modules/sigstore/node_modules/@sigstore/bundle/dist/build.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/bundle.js b/node_modules/sigstore/node_modules/@sigstore/bundle/dist/bundle.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/bundle.js rename to node_modules/sigstore/node_modules/@sigstore/bundle/dist/bundle.js diff --git 
a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/error.js b/node_modules/sigstore/node_modules/@sigstore/bundle/dist/error.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/error.js rename to node_modules/sigstore/node_modules/@sigstore/bundle/dist/error.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/index.js b/node_modules/sigstore/node_modules/@sigstore/bundle/dist/index.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/index.js rename to node_modules/sigstore/node_modules/@sigstore/bundle/dist/index.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/serialized.js b/node_modules/sigstore/node_modules/@sigstore/bundle/dist/serialized.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/serialized.js rename to node_modules/sigstore/node_modules/@sigstore/bundle/dist/serialized.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/utility.js b/node_modules/sigstore/node_modules/@sigstore/bundle/dist/utility.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/utility.js rename to node_modules/sigstore/node_modules/@sigstore/bundle/dist/utility.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/validate.js b/node_modules/sigstore/node_modules/@sigstore/bundle/dist/validate.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/dist/validate.js rename to node_modules/sigstore/node_modules/@sigstore/bundle/dist/validate.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/package.json b/node_modules/sigstore/node_modules/@sigstore/bundle/package.json similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle/package.json rename to node_modules/sigstore/node_modules/@sigstore/bundle/package.json diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/LICENSE b/node_modules/sigstore/node_modules/@sigstore/core/LICENSE similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/LICENSE rename to node_modules/sigstore/node_modules/@sigstore/core/LICENSE diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/error.js b/node_modules/sigstore/node_modules/@sigstore/core/dist/asn1/error.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/error.js rename to node_modules/sigstore/node_modules/@sigstore/core/dist/asn1/error.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/index.js b/node_modules/sigstore/node_modules/@sigstore/core/dist/asn1/index.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/index.js rename to node_modules/sigstore/node_modules/@sigstore/core/dist/asn1/index.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/length.js b/node_modules/sigstore/node_modules/@sigstore/core/dist/asn1/length.js similarity index 100% rename from 
node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/length.js rename to node_modules/sigstore/node_modules/@sigstore/core/dist/asn1/length.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/obj.js b/node_modules/sigstore/node_modules/@sigstore/core/dist/asn1/obj.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/obj.js rename to node_modules/sigstore/node_modules/@sigstore/core/dist/asn1/obj.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/parse.js b/node_modules/sigstore/node_modules/@sigstore/core/dist/asn1/parse.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/parse.js rename to node_modules/sigstore/node_modules/@sigstore/core/dist/asn1/parse.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/tag.js b/node_modules/sigstore/node_modules/@sigstore/core/dist/asn1/tag.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/asn1/tag.js rename to node_modules/sigstore/node_modules/@sigstore/core/dist/asn1/tag.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/crypto.js b/node_modules/sigstore/node_modules/@sigstore/core/dist/crypto.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/crypto.js rename to node_modules/sigstore/node_modules/@sigstore/core/dist/crypto.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/dsse.js b/node_modules/sigstore/node_modules/@sigstore/core/dist/dsse.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/dsse.js rename to node_modules/sigstore/node_modules/@sigstore/core/dist/dsse.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/encoding.js b/node_modules/sigstore/node_modules/@sigstore/core/dist/encoding.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/encoding.js rename to node_modules/sigstore/node_modules/@sigstore/core/dist/encoding.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/index.js b/node_modules/sigstore/node_modules/@sigstore/core/dist/index.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/index.js rename to node_modules/sigstore/node_modules/@sigstore/core/dist/index.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/json.js b/node_modules/sigstore/node_modules/@sigstore/core/dist/json.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/json.js rename to node_modules/sigstore/node_modules/@sigstore/core/dist/json.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/oid.js b/node_modules/sigstore/node_modules/@sigstore/core/dist/oid.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/oid.js rename to node_modules/sigstore/node_modules/@sigstore/core/dist/oid.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/pem.js b/node_modules/sigstore/node_modules/@sigstore/core/dist/pem.js 
similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/pem.js rename to node_modules/sigstore/node_modules/@sigstore/core/dist/pem.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/rfc3161/error.js b/node_modules/sigstore/node_modules/@sigstore/core/dist/rfc3161/error.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/rfc3161/error.js rename to node_modules/sigstore/node_modules/@sigstore/core/dist/rfc3161/error.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/rfc3161/index.js b/node_modules/sigstore/node_modules/@sigstore/core/dist/rfc3161/index.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/rfc3161/index.js rename to node_modules/sigstore/node_modules/@sigstore/core/dist/rfc3161/index.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/rfc3161/timestamp.js b/node_modules/sigstore/node_modules/@sigstore/core/dist/rfc3161/timestamp.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/rfc3161/timestamp.js rename to node_modules/sigstore/node_modules/@sigstore/core/dist/rfc3161/timestamp.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js b/node_modules/sigstore/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js rename to node_modules/sigstore/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/stream.js b/node_modules/sigstore/node_modules/@sigstore/core/dist/stream.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/stream.js rename to node_modules/sigstore/node_modules/@sigstore/core/dist/stream.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/x509/cert.js b/node_modules/sigstore/node_modules/@sigstore/core/dist/x509/cert.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/x509/cert.js rename to node_modules/sigstore/node_modules/@sigstore/core/dist/x509/cert.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/x509/ext.js b/node_modules/sigstore/node_modules/@sigstore/core/dist/x509/ext.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/x509/ext.js rename to node_modules/sigstore/node_modules/@sigstore/core/dist/x509/ext.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/x509/index.js b/node_modules/sigstore/node_modules/@sigstore/core/dist/x509/index.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/x509/index.js rename to node_modules/sigstore/node_modules/@sigstore/core/dist/x509/index.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/x509/sct.js b/node_modules/sigstore/node_modules/@sigstore/core/dist/x509/sct.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/dist/x509/sct.js rename to 
node_modules/sigstore/node_modules/@sigstore/core/dist/x509/sct.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/package.json b/node_modules/sigstore/node_modules/@sigstore/core/package.json similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core/package.json rename to node_modules/sigstore/node_modules/@sigstore/core/package.json diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/LICENSE b/node_modules/sigstore/node_modules/@sigstore/sign/LICENSE similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf/LICENSE rename to node_modules/sigstore/node_modules/@sigstore/sign/LICENSE diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/bundler/base.js b/node_modules/sigstore/node_modules/@sigstore/sign/dist/bundler/base.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/bundler/base.js rename to node_modules/sigstore/node_modules/@sigstore/sign/dist/bundler/base.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/bundler/bundle.js b/node_modules/sigstore/node_modules/@sigstore/sign/dist/bundler/bundle.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/bundler/bundle.js rename to node_modules/sigstore/node_modules/@sigstore/sign/dist/bundler/bundle.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/bundler/dsse.js b/node_modules/sigstore/node_modules/@sigstore/sign/dist/bundler/dsse.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/bundler/dsse.js rename to node_modules/sigstore/node_modules/@sigstore/sign/dist/bundler/dsse.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/bundler/index.js b/node_modules/sigstore/node_modules/@sigstore/sign/dist/bundler/index.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/bundler/index.js rename to node_modules/sigstore/node_modules/@sigstore/sign/dist/bundler/index.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/bundler/message.js b/node_modules/sigstore/node_modules/@sigstore/sign/dist/bundler/message.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/bundler/message.js rename to node_modules/sigstore/node_modules/@sigstore/sign/dist/bundler/message.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/error.js b/node_modules/sigstore/node_modules/@sigstore/sign/dist/error.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/error.js rename to node_modules/sigstore/node_modules/@sigstore/sign/dist/error.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/external/error.js b/node_modules/sigstore/node_modules/@sigstore/sign/dist/external/error.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/external/error.js rename to node_modules/sigstore/node_modules/@sigstore/sign/dist/external/error.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/external/fetch.js 
b/node_modules/sigstore/node_modules/@sigstore/sign/dist/external/fetch.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/external/fetch.js rename to node_modules/sigstore/node_modules/@sigstore/sign/dist/external/fetch.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/external/fulcio.js b/node_modules/sigstore/node_modules/@sigstore/sign/dist/external/fulcio.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/external/fulcio.js rename to node_modules/sigstore/node_modules/@sigstore/sign/dist/external/fulcio.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/external/rekor.js b/node_modules/sigstore/node_modules/@sigstore/sign/dist/external/rekor.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/external/rekor.js rename to node_modules/sigstore/node_modules/@sigstore/sign/dist/external/rekor.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/external/tsa.js b/node_modules/sigstore/node_modules/@sigstore/sign/dist/external/tsa.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/external/tsa.js rename to node_modules/sigstore/node_modules/@sigstore/sign/dist/external/tsa.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/identity/ci.js b/node_modules/sigstore/node_modules/@sigstore/sign/dist/identity/ci.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/identity/ci.js rename to node_modules/sigstore/node_modules/@sigstore/sign/dist/identity/ci.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/identity/index.js b/node_modules/sigstore/node_modules/@sigstore/sign/dist/identity/index.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/identity/index.js rename to node_modules/sigstore/node_modules/@sigstore/sign/dist/identity/index.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/identity/provider.js b/node_modules/sigstore/node_modules/@sigstore/sign/dist/identity/provider.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/identity/provider.js rename to node_modules/sigstore/node_modules/@sigstore/sign/dist/identity/provider.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/index.js b/node_modules/sigstore/node_modules/@sigstore/sign/dist/index.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/index.js rename to node_modules/sigstore/node_modules/@sigstore/sign/dist/index.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js b/node_modules/sigstore/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js rename to node_modules/sigstore/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js 
b/node_modules/sigstore/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js rename to node_modules/sigstore/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/signer/fulcio/index.js b/node_modules/sigstore/node_modules/@sigstore/sign/dist/signer/fulcio/index.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/signer/fulcio/index.js rename to node_modules/sigstore/node_modules/@sigstore/sign/dist/signer/fulcio/index.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/signer/index.js b/node_modules/sigstore/node_modules/@sigstore/sign/dist/signer/index.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/signer/index.js rename to node_modules/sigstore/node_modules/@sigstore/sign/dist/signer/index.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/signer/signer.js b/node_modules/sigstore/node_modules/@sigstore/sign/dist/signer/signer.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/signer/signer.js rename to node_modules/sigstore/node_modules/@sigstore/sign/dist/signer/signer.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/types/fetch.js b/node_modules/sigstore/node_modules/@sigstore/sign/dist/types/fetch.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/types/fetch.js rename to node_modules/sigstore/node_modules/@sigstore/sign/dist/types/fetch.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/util/index.js b/node_modules/sigstore/node_modules/@sigstore/sign/dist/util/index.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/util/index.js rename to node_modules/sigstore/node_modules/@sigstore/sign/dist/util/index.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/util/oidc.js b/node_modules/sigstore/node_modules/@sigstore/sign/dist/util/oidc.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/util/oidc.js rename to node_modules/sigstore/node_modules/@sigstore/sign/dist/util/oidc.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/util/ua.js b/node_modules/sigstore/node_modules/@sigstore/sign/dist/util/ua.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/util/ua.js rename to node_modules/sigstore/node_modules/@sigstore/sign/dist/util/ua.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/index.js b/node_modules/sigstore/node_modules/@sigstore/sign/dist/witness/index.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/index.js rename to node_modules/sigstore/node_modules/@sigstore/sign/dist/witness/index.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/tlog/client.js b/node_modules/sigstore/node_modules/@sigstore/sign/dist/witness/tlog/client.js similarity 
index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/tlog/client.js rename to node_modules/sigstore/node_modules/@sigstore/sign/dist/witness/tlog/client.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/tlog/entry.js b/node_modules/sigstore/node_modules/@sigstore/sign/dist/witness/tlog/entry.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/tlog/entry.js rename to node_modules/sigstore/node_modules/@sigstore/sign/dist/witness/tlog/entry.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/tlog/index.js b/node_modules/sigstore/node_modules/@sigstore/sign/dist/witness/tlog/index.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/tlog/index.js rename to node_modules/sigstore/node_modules/@sigstore/sign/dist/witness/tlog/index.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/tsa/client.js b/node_modules/sigstore/node_modules/@sigstore/sign/dist/witness/tsa/client.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/tsa/client.js rename to node_modules/sigstore/node_modules/@sigstore/sign/dist/witness/tsa/client.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/tsa/index.js b/node_modules/sigstore/node_modules/@sigstore/sign/dist/witness/tsa/index.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/tsa/index.js rename to node_modules/sigstore/node_modules/@sigstore/sign/dist/witness/tsa/index.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/witness.js b/node_modules/sigstore/node_modules/@sigstore/sign/dist/witness/witness.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/dist/witness/witness.js rename to node_modules/sigstore/node_modules/@sigstore/sign/dist/witness/witness.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/package.json b/node_modules/sigstore/node_modules/@sigstore/sign/package.json similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign/package.json rename to node_modules/sigstore/node_modules/@sigstore/sign/package.json diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/bundle/dsse.js b/node_modules/sigstore/node_modules/@sigstore/verify/dist/bundle/dsse.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/bundle/dsse.js rename to node_modules/sigstore/node_modules/@sigstore/verify/dist/bundle/dsse.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/bundle/index.js b/node_modules/sigstore/node_modules/@sigstore/verify/dist/bundle/index.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/bundle/index.js rename to node_modules/sigstore/node_modules/@sigstore/verify/dist/bundle/index.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/bundle/message.js b/node_modules/sigstore/node_modules/@sigstore/verify/dist/bundle/message.js similarity index 100% rename 
from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/bundle/message.js rename to node_modules/sigstore/node_modules/@sigstore/verify/dist/bundle/message.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/error.js b/node_modules/sigstore/node_modules/@sigstore/verify/dist/error.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/error.js rename to node_modules/sigstore/node_modules/@sigstore/verify/dist/error.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/index.js b/node_modules/sigstore/node_modules/@sigstore/verify/dist/index.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/index.js rename to node_modules/sigstore/node_modules/@sigstore/verify/dist/index.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/key/certificate.js b/node_modules/sigstore/node_modules/@sigstore/verify/dist/key/certificate.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/key/certificate.js rename to node_modules/sigstore/node_modules/@sigstore/verify/dist/key/certificate.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/key/index.js b/node_modules/sigstore/node_modules/@sigstore/verify/dist/key/index.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/key/index.js rename to node_modules/sigstore/node_modules/@sigstore/verify/dist/key/index.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/key/sct.js b/node_modules/sigstore/node_modules/@sigstore/verify/dist/key/sct.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/key/sct.js rename to node_modules/sigstore/node_modules/@sigstore/verify/dist/key/sct.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/policy.js b/node_modules/sigstore/node_modules/@sigstore/verify/dist/policy.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/policy.js rename to node_modules/sigstore/node_modules/@sigstore/verify/dist/policy.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/shared.types.js b/node_modules/sigstore/node_modules/@sigstore/verify/dist/shared.types.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/shared.types.js rename to node_modules/sigstore/node_modules/@sigstore/verify/dist/shared.types.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js b/node_modules/sigstore/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js rename to node_modules/sigstore/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/timestamp/index.js b/node_modules/sigstore/node_modules/@sigstore/verify/dist/timestamp/index.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/timestamp/index.js rename to 
node_modules/sigstore/node_modules/@sigstore/verify/dist/timestamp/index.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/timestamp/merkle.js b/node_modules/sigstore/node_modules/@sigstore/verify/dist/timestamp/merkle.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/timestamp/merkle.js rename to node_modules/sigstore/node_modules/@sigstore/verify/dist/timestamp/merkle.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/timestamp/set.js b/node_modules/sigstore/node_modules/@sigstore/verify/dist/timestamp/set.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/timestamp/set.js rename to node_modules/sigstore/node_modules/@sigstore/verify/dist/timestamp/set.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/timestamp/tsa.js b/node_modules/sigstore/node_modules/@sigstore/verify/dist/timestamp/tsa.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/timestamp/tsa.js rename to node_modules/sigstore/node_modules/@sigstore/verify/dist/timestamp/tsa.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/tlog/dsse.js b/node_modules/sigstore/node_modules/@sigstore/verify/dist/tlog/dsse.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/tlog/dsse.js rename to node_modules/sigstore/node_modules/@sigstore/verify/dist/tlog/dsse.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js b/node_modules/sigstore/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js rename to node_modules/sigstore/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/tlog/index.js b/node_modules/sigstore/node_modules/@sigstore/verify/dist/tlog/index.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/tlog/index.js rename to node_modules/sigstore/node_modules/@sigstore/verify/dist/tlog/index.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/tlog/intoto.js b/node_modules/sigstore/node_modules/@sigstore/verify/dist/tlog/intoto.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/tlog/intoto.js rename to node_modules/sigstore/node_modules/@sigstore/verify/dist/tlog/intoto.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/trust/filter.js b/node_modules/sigstore/node_modules/@sigstore/verify/dist/trust/filter.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/trust/filter.js rename to node_modules/sigstore/node_modules/@sigstore/verify/dist/trust/filter.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/trust/index.js b/node_modules/sigstore/node_modules/@sigstore/verify/dist/trust/index.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/trust/index.js rename to 
node_modules/sigstore/node_modules/@sigstore/verify/dist/trust/index.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/trust/trust.types.js b/node_modules/sigstore/node_modules/@sigstore/verify/dist/trust/trust.types.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/trust/trust.types.js rename to node_modules/sigstore/node_modules/@sigstore/verify/dist/trust/trust.types.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/verifier.js b/node_modules/sigstore/node_modules/@sigstore/verify/dist/verifier.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/dist/verifier.js rename to node_modules/sigstore/node_modules/@sigstore/verify/dist/verifier.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/package.json b/node_modules/sigstore/node_modules/@sigstore/verify/package.json similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify/package.json rename to node_modules/sigstore/node_modules/@sigstore/verify/package.json diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/sigstore/package.json b/node_modules/sigstore/package.json similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/sigstore/package.json rename to node_modules/sigstore/package.json diff --git a/node_modules/tuf-js/dist/config.js b/node_modules/tuf-js/dist/config.js index 6845679942fec..c66d76af86b98 100644 --- a/node_modules/tuf-js/dist/config.js +++ b/node_modules/tuf-js/dist/config.js @@ -2,7 +2,7 @@ Object.defineProperty(exports, "__esModule", { value: true }); exports.defaultConfig = void 0; exports.defaultConfig = { - maxRootRotations: 32, + maxRootRotations: 256, maxDelegations: 32, rootMaxLength: 512000, //bytes timestampMaxLength: 16384, // bytes diff --git a/node_modules/tuf-js/dist/updater.js b/node_modules/tuf-js/dist/updater.js index 5317f7e14659a..8d5eb4428f044 100644 --- a/node_modules/tuf-js/dist/updater.js +++ b/node_modules/tuf-js/dist/updater.js @@ -144,7 +144,7 @@ class Updater { const rootVersion = this.trustedSet.root.signed.version; const lowerBound = rootVersion + 1; const upperBound = lowerBound + this.config.maxRootRotations; - for (let version = lowerBound; version <= upperBound; version++) { + for (let version = lowerBound; version < upperBound; version++) { const rootUrl = url.join(this.metadataBaseUrl, `${version}.root.json`); try { // Client workflow 5.3.3: download new root metadata file @@ -155,7 +155,13 @@ class Updater { this.persistMetadata(models_1.MetadataKind.Root, bytesData); } catch (error) { - break; + if (error instanceof error_1.DownloadHTTPError) { + // 404/403 means current root is newest available + if ([403, 404].includes(error.statusCode)) { + break; + } + } + throw error; } } } @@ -247,7 +253,8 @@ class Updater { const version = this.trustedSet.root.signed.consistentSnapshot ? metaInfo.version : undefined; - const metadataUrl = url.join(this.metadataBaseUrl, version ? `${version}.${role}.json` : `${role}.json`); + const encodedRole = encodeURIComponent(role); + const metadataUrl = url.join(this.metadataBaseUrl, version ? 
`${version}.${encodedRole}.json` : `${encodedRole}.json`); try { // Client workflow 5.6.1: download targets metadata file const bytesData = await this.fetcher.downloadBytes(metadataUrl, maxLength); @@ -280,7 +287,6 @@ class Updater { while (visitedRoleNames.size <= this.config.maxDelegations && delegationsToVisit.length > 0) { // Pop the role name from the top of the stack. - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion const { roleName, parentRoleName } = delegationsToVisit.pop(); // Skip any visited current role to prevent cycles. // Client workflow 5.6.7.1: skip already-visited roles @@ -330,13 +336,14 @@ class Updater { return path.join(this.targetDir, filePath); } persistMetadata(metaDataName, bytesData) { + const encodedName = encodeURIComponent(metaDataName); try { - const filePath = path.join(this.dir, `${metaDataName}.json`); + const filePath = path.join(this.dir, `${encodedName}.json`); log('WRITE %s', filePath); fs.writeFileSync(filePath, bytesData.toString('utf8')); } catch (error) { - throw new error_1.PersistError(`Failed to persist metadata ${metaDataName} error: ${error}`); + throw new error_1.PersistError(`Failed to persist metadata ${encodedName} error: ${error}`); } } } diff --git a/node_modules/tuf-js/dist/utils/url.js b/node_modules/tuf-js/dist/utils/url.js index ce67fe2c23053..359d1f3ef385b 100644 --- a/node_modules/tuf-js/dist/utils/url.js +++ b/node_modules/tuf-js/dist/utils/url.js @@ -1,11 +1,10 @@ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); -exports.join = void 0; +exports.join = join; const url_1 = require("url"); function join(base, path) { return new url_1.URL(ensureTrailingSlash(base) + removeLeadingSlash(path)).toString(); } -exports.join = join; function ensureTrailingSlash(path) { return path.endsWith('/') ? path : path + '/'; } diff --git a/node_modules/tuf-js/node_modules/@npmcli/agent/lib/agents.js b/node_modules/tuf-js/node_modules/@npmcli/agent/lib/agents.js deleted file mode 100644 index c541b93001517..0000000000000 --- a/node_modules/tuf-js/node_modules/@npmcli/agent/lib/agents.js +++ /dev/null @@ -1,206 +0,0 @@ -'use strict' - -const net = require('net') -const tls = require('tls') -const { once } = require('events') -const timers = require('timers/promises') -const { normalizeOptions, cacheOptions } = require('./options') -const { getProxy, getProxyAgent, proxyCache } = require('./proxy.js') -const Errors = require('./errors.js') -const { Agent: AgentBase } = require('agent-base') - -module.exports = class Agent extends AgentBase { - #options - #timeouts - #proxy - #noProxy - #ProxyAgent - - constructor (options = {}) { - const { timeouts, proxy, noProxy, ...normalizedOptions } = normalizeOptions(options) - - super(normalizedOptions) - - this.#options = normalizedOptions - this.#timeouts = timeouts - - if (proxy) { - this.#proxy = new URL(proxy) - this.#noProxy = noProxy - this.#ProxyAgent = getProxyAgent(proxy) - } - } - - get proxy () { - return this.#proxy ? 
{ url: this.#proxy } : {} - } - - #getProxy (options) { - if (!this.#proxy) { - return - } - - const proxy = getProxy(`${options.protocol}//${options.host}:${options.port}`, { - proxy: this.#proxy, - noProxy: this.#noProxy, - }) - - if (!proxy) { - return - } - - const cacheKey = cacheOptions({ - ...options, - ...this.#options, - timeouts: this.#timeouts, - proxy, - }) - - if (proxyCache.has(cacheKey)) { - return proxyCache.get(cacheKey) - } - - let ProxyAgent = this.#ProxyAgent - if (Array.isArray(ProxyAgent)) { - ProxyAgent = this.isSecureEndpoint(options) ? ProxyAgent[1] : ProxyAgent[0] - } - - const proxyAgent = new ProxyAgent(proxy, { - ...this.#options, - socketOptions: { family: this.#options.family }, - }) - proxyCache.set(cacheKey, proxyAgent) - - return proxyAgent - } - - // takes an array of promises and races them against the connection timeout - // which will throw the necessary error if it is hit. This will return the - // result of the promise race. - async #timeoutConnection ({ promises, options, timeout }, ac = new AbortController()) { - if (timeout) { - const connectionTimeout = timers.setTimeout(timeout, null, { signal: ac.signal }) - .then(() => { - throw new Errors.ConnectionTimeoutError(`${options.host}:${options.port}`) - }).catch((err) => { - if (err.name === 'AbortError') { - return - } - throw err - }) - promises.push(connectionTimeout) - } - - let result - try { - result = await Promise.race(promises) - ac.abort() - } catch (err) { - ac.abort() - throw err - } - return result - } - - async connect (request, options) { - // if the connection does not have its own lookup function - // set, then use the one from our options - options.lookup ??= this.#options.lookup - - let socket - let timeout = this.#timeouts.connection - const isSecureEndpoint = this.isSecureEndpoint(options) - - const proxy = this.#getProxy(options) - if (proxy) { - // some of the proxies will wait for the socket to fully connect before - // returning so we have to await this while also racing it against the - // connection timeout. - const start = Date.now() - socket = await this.#timeoutConnection({ - options, - timeout, - promises: [proxy.connect(request, options)], - }) - // see how much time proxy.connect took and subtract it from - // the timeout - if (timeout) { - timeout = timeout - (Date.now() - start) - } - } else { - socket = (isSecureEndpoint ? tls : net).connect(options) - } - - socket.setKeepAlive(this.keepAlive, this.keepAliveMsecs) - socket.setNoDelay(this.keepAlive) - - const abortController = new AbortController() - const { signal } = abortController - - const connectPromise = socket[isSecureEndpoint ? 'secureConnecting' : 'connecting'] - ? once(socket, isSecureEndpoint ? 'secureConnect' : 'connect', { signal }) - : Promise.resolve() - - await this.#timeoutConnection({ - options, - timeout, - promises: [ - connectPromise, - once(socket, 'error', { signal }).then((err) => { - throw err[0] - }), - ], - }, abortController) - - if (this.#timeouts.idle) { - socket.setTimeout(this.#timeouts.idle, () => { - socket.destroy(new Errors.IdleTimeoutError(`${options.host}:${options.port}`)) - }) - } - - return socket - } - - addRequest (request, options) { - const proxy = this.#getProxy(options) - // it would be better to call proxy.addRequest here but this causes the - // http-proxy-agent to call its super.addRequest which causes the request - // to be added to the agent twice. 
since we only support 3 agents - // currently (see the required agents in proxy.js) we have manually - // checked that the only public methods we need to call are called in the - // next block. this could change in the future and presumably we would get - // failing tests until we have properly called the necessary methods on - // each of our proxy agents - if (proxy?.setRequestProps) { - proxy.setRequestProps(request, options) - } - - request.setHeader('connection', this.keepAlive ? 'keep-alive' : 'close') - - if (this.#timeouts.response) { - let responseTimeout - request.once('finish', () => { - setTimeout(() => { - request.destroy(new Errors.ResponseTimeoutError(request, this.#proxy)) - }, this.#timeouts.response) - }) - request.once('response', () => { - clearTimeout(responseTimeout) - }) - } - - if (this.#timeouts.transfer) { - let transferTimeout - request.once('response', (res) => { - setTimeout(() => { - res.destroy(new Errors.TransferTimeoutError(request, this.#proxy)) - }, this.#timeouts.transfer) - res.once('close', () => { - clearTimeout(transferTimeout) - }) - }) - } - - return super.addRequest(request, options) - } -} diff --git a/node_modules/tuf-js/node_modules/@npmcli/agent/lib/dns.js b/node_modules/tuf-js/node_modules/@npmcli/agent/lib/dns.js deleted file mode 100644 index 3c6946c566d73..0000000000000 --- a/node_modules/tuf-js/node_modules/@npmcli/agent/lib/dns.js +++ /dev/null @@ -1,53 +0,0 @@ -'use strict' - -const { LRUCache } = require('lru-cache') -const dns = require('dns') - -// this is a factory so that each request can have its own opts (i.e. ttl) -// while still sharing the cache across all requests -const cache = new LRUCache({ max: 50 }) - -const getOptions = ({ - family = 0, - hints = dns.ADDRCONFIG, - all = false, - verbatim = undefined, - ttl = 5 * 60 * 1000, - lookup = dns.lookup, -}) => ({ - // hints and lookup are returned since both are top level properties to (net|tls).connect - hints, - lookup: (hostname, ...args) => { - const callback = args.pop() // callback is always last arg - const lookupOptions = args[0] ?? {} - - const options = { - family, - hints, - all, - verbatim, - ...(typeof lookupOptions === 'number' ? 
{ family: lookupOptions } : lookupOptions), - } - - const key = JSON.stringify({ hostname, ...options }) - - if (cache.has(key)) { - const cached = cache.get(key) - return process.nextTick(callback, null, ...cached) - } - - lookup(hostname, options, (err, ...result) => { - if (err) { - return callback(err) - } - - cache.set(key, result, { ttl }) - return callback(null, ...result) - }) - }, -}) - -module.exports = { - cache, - getOptions, -} diff --git a/node_modules/tuf-js/node_modules/@npmcli/agent/lib/errors.js b/node_modules/tuf-js/node_modules/@npmcli/agent/lib/errors.js deleted file mode 100644 index 70475aec8eb35..0000000000000 --- a/node_modules/tuf-js/node_modules/@npmcli/agent/lib/errors.js +++ /dev/null @@ -1,61 +0,0 @@ -'use strict' - -class InvalidProxyProtocolError extends Error { - constructor (url) { - super(`Invalid protocol \`${url.protocol}\` connecting to proxy \`${url.host}\``) - this.code = 'EINVALIDPROXY' - this.proxy = url - } -} - -class ConnectionTimeoutError extends Error { - constructor (host) { - super(`Timeout connecting to host \`${host}\``) - this.code = 'ECONNECTIONTIMEOUT' - this.host = host - } -} - -class IdleTimeoutError extends Error { - constructor (host) { - super(`Idle timeout reached for host \`${host}\``) - this.code = 'EIDLETIMEOUT' - this.host = host - } -} - -class ResponseTimeoutError extends Error { - constructor (request, proxy) { - let msg = 'Response timeout ' - if (proxy) { - msg += `from proxy \`${proxy.host}\` ` - } - msg += `connecting to host \`${request.host}\`` - super(msg) - this.code = 'ERESPONSETIMEOUT' - this.proxy = proxy - this.request = request - } -} - -class TransferTimeoutError extends Error { - constructor (request, proxy) { - let msg = 'Transfer timeout ' - if (proxy) { - msg += `from proxy \`${proxy.host}\` ` - } - msg += `for \`${request.host}\`` - super(msg) - this.code = 'ETRANSFERTIMEOUT' - this.proxy = proxy - this.request = request - } -} - -module.exports = { - InvalidProxyProtocolError, - ConnectionTimeoutError, - IdleTimeoutError, - ResponseTimeoutError, - TransferTimeoutError, -} diff --git a/node_modules/tuf-js/node_modules/@npmcli/agent/lib/index.js b/node_modules/tuf-js/node_modules/@npmcli/agent/lib/index.js deleted file mode 100644 index b33d6eaef07a2..0000000000000 --- a/node_modules/tuf-js/node_modules/@npmcli/agent/lib/index.js +++ /dev/null @@ -1,56 +0,0 @@ -'use strict' - -const { LRUCache } = require('lru-cache') -const { normalizeOptions, cacheOptions } = require('./options') -const { getProxy, proxyCache } = require('./proxy.js') -const dns = require('./dns.js') -const Agent = require('./agents.js') - -const agentCache = new LRUCache({ max: 20 }) - -const getAgent = (url, { agent, proxy, noProxy, ...options } = {}) => { - // false has meaning so this can't be a simple truthiness check - if (agent != null) { - return agent - } - - url = new URL(url) - - const proxyForUrl = getProxy(url, { proxy, noProxy }) - const normalizedOptions = { - ...normalizeOptions(options), - proxy: proxyForUrl, - } - - const cacheKey = cacheOptions({ - ...normalizedOptions, - secureEndpoint: url.protocol === 'https:', - }) - - if (agentCache.has(cacheKey)) { - return agentCache.get(cacheKey) - } - - const newAgent = new Agent(normalizedOptions) - agentCache.set(cacheKey, newAgent) - - return newAgent -} - -module.exports = { - getAgent, - Agent, - // these are exported for backwards compatability - HttpAgent: Agent, - HttpsAgent: Agent, - cache: { - proxy: proxyCache, - agent: agentCache, - dns: dns.cache, - clear: () => 
{ - proxyCache.clear() - agentCache.clear() - dns.cache.clear() - }, - }, -} diff --git a/node_modules/tuf-js/node_modules/@npmcli/agent/lib/options.js b/node_modules/tuf-js/node_modules/@npmcli/agent/lib/options.js deleted file mode 100644 index 0bf53f725f084..0000000000000 --- a/node_modules/tuf-js/node_modules/@npmcli/agent/lib/options.js +++ /dev/null @@ -1,86 +0,0 @@ -'use strict' - -const dns = require('./dns') - -const normalizeOptions = (opts) => { - const family = parseInt(opts.family ?? '0', 10) - const keepAlive = opts.keepAlive ?? true - - const normalized = { - // nodejs http agent options. these are all the defaults - // but kept here to increase the likelihood of cache hits - // https://nodejs.org/api/http.html#new-agentoptions - keepAliveMsecs: keepAlive ? 1000 : undefined, - maxSockets: opts.maxSockets ?? 15, - maxTotalSockets: Infinity, - maxFreeSockets: keepAlive ? 256 : undefined, - scheduling: 'fifo', - // then spread the rest of the options - ...opts, - // we already set these to their defaults that we want - family, - keepAlive, - // our custom timeout options - timeouts: { - // the standard timeout option is mapped to our idle timeout - // and then deleted below - idle: opts.timeout ?? 0, - connection: 0, - response: 0, - transfer: 0, - ...opts.timeouts, - }, - // get the dns options that go at the top level of socket connection - ...dns.getOptions({ family, ...opts.dns }), - } - - // remove timeout since we already used it to set our own idle timeout - delete normalized.timeout - - return normalized -} - -const createKey = (obj) => { - let key = '' - const sorted = Object.entries(obj).sort((a, b) => a[0] - b[0]) - for (let [k, v] of sorted) { - if (v == null) { - v = 'null' - } else if (v instanceof URL) { - v = v.toString() - } else if (typeof v === 'object') { - v = createKey(v) - } - key += `${k}:${v}:` - } - return key -} - -const cacheOptions = ({ secureEndpoint, ...options }) => createKey({ - secureEndpoint: !!secureEndpoint, - // socket connect options - family: options.family, - hints: options.hints, - localAddress: options.localAddress, - // tls specific connect options - strictSsl: secureEndpoint ? !!options.rejectUnauthorized : false, - ca: secureEndpoint ? options.ca : null, - cert: secureEndpoint ? options.cert : null, - key: secureEndpoint ? 
options.key : null, - // http agent options - keepAlive: options.keepAlive, - keepAliveMsecs: options.keepAliveMsecs, - maxSockets: options.maxSockets, - maxTotalSockets: options.maxTotalSockets, - maxFreeSockets: options.maxFreeSockets, - scheduling: options.scheduling, - // timeout options - timeouts: options.timeouts, - // proxy - proxy: options.proxy, -}) - -module.exports = { - normalizeOptions, - cacheOptions, -} diff --git a/node_modules/tuf-js/node_modules/@npmcli/agent/lib/proxy.js b/node_modules/tuf-js/node_modules/@npmcli/agent/lib/proxy.js deleted file mode 100644 index 6272e929e57bc..0000000000000 --- a/node_modules/tuf-js/node_modules/@npmcli/agent/lib/proxy.js +++ /dev/null @@ -1,88 +0,0 @@ -'use strict' - -const { HttpProxyAgent } = require('http-proxy-agent') -const { HttpsProxyAgent } = require('https-proxy-agent') -const { SocksProxyAgent } = require('socks-proxy-agent') -const { LRUCache } = require('lru-cache') -const { InvalidProxyProtocolError } = require('./errors.js') - -const PROXY_CACHE = new LRUCache({ max: 20 }) - -const SOCKS_PROTOCOLS = new Set(SocksProxyAgent.protocols) - -const PROXY_ENV_KEYS = new Set(['https_proxy', 'http_proxy', 'proxy', 'no_proxy']) - -const PROXY_ENV = Object.entries(process.env).reduce((acc, [key, value]) => { - key = key.toLowerCase() - if (PROXY_ENV_KEYS.has(key)) { - acc[key] = value - } - return acc -}, {}) - -const getProxyAgent = (url) => { - url = new URL(url) - - const protocol = url.protocol.slice(0, -1) - if (SOCKS_PROTOCOLS.has(protocol)) { - return SocksProxyAgent - } - if (protocol === 'https' || protocol === 'http') { - return [HttpProxyAgent, HttpsProxyAgent] - } - - throw new InvalidProxyProtocolError(url) -} - -const isNoProxy = (url, noProxy) => { - if (typeof noProxy === 'string') { - noProxy = noProxy.split(',').map((p) => p.trim()).filter(Boolean) - } - - if (!noProxy || !noProxy.length) { - return false - } - - const hostSegments = url.hostname.split('.').reverse() - - return noProxy.some((no) => { - const noSegments = no.split('.').filter(Boolean).reverse() - if (!noSegments.length) { - return false - } - - for (let i = 0; i < noSegments.length; i++) { - if (hostSegments[i] !== noSegments[i]) { - return false - } - } - - return true - }) -} - -const getProxy = (url, { proxy, noProxy }) => { - url = new URL(url) - - if (!proxy) { - proxy = url.protocol === 'https:' - ? 
PROXY_ENV.https_proxy - : PROXY_ENV.https_proxy || PROXY_ENV.http_proxy || PROXY_ENV.proxy - } - - if (!noProxy) { - noProxy = PROXY_ENV.no_proxy - } - - if (!proxy || isNoProxy(url, noProxy)) { - return null - } - - return new URL(proxy) -} - -module.exports = { - getProxyAgent, - getProxy, - proxyCache: PROXY_CACHE, -} diff --git a/node_modules/tuf-js/node_modules/@npmcli/agent/package.json b/node_modules/tuf-js/node_modules/@npmcli/agent/package.json deleted file mode 100644 index ef5b4e3228cc4..0000000000000 --- a/node_modules/tuf-js/node_modules/@npmcli/agent/package.json +++ /dev/null @@ -1,60 +0,0 @@ -{ - "name": "@npmcli/agent", - "version": "2.2.2", - "description": "the http/https agent used by the npm cli", - "main": "lib/index.js", - "scripts": { - "gencerts": "bash scripts/create-cert.sh", - "test": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "snap": "tap", - "posttest": "npm run lint" - }, - "author": "GitHub Inc.", - "license": "ISC", - "bugs": { - "url": "https://github.com/npm/agent/issues" - }, - "homepage": "https://github.com/npm/agent#readme", - "files": [ - "bin/", - "lib/" - ], - "engines": { - "node": "^16.14.0 || >=18.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.21.3", - "publish": "true" - }, - "dependencies": { - "agent-base": "^7.1.0", - "http-proxy-agent": "^7.0.0", - "https-proxy-agent": "^7.0.1", - "lru-cache": "^10.0.1", - "socks-proxy-agent": "^8.0.3" - }, - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.21.3", - "minipass-fetch": "^3.0.3", - "nock": "^13.2.7", - "semver": "^7.5.4", - "simple-socks": "^3.1.0", - "tap": "^16.3.0" - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/agent.git" - }, - "tap": { - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - } -} diff --git a/node_modules/tuf-js/node_modules/@npmcli/fs/LICENSE.md b/node_modules/tuf-js/node_modules/@npmcli/fs/LICENSE.md deleted file mode 100644 index 5fc208ff122e0..0000000000000 --- a/node_modules/tuf-js/node_modules/@npmcli/fs/LICENSE.md +++ /dev/null @@ -1,20 +0,0 @@ - - -ISC License - -Copyright npm, Inc. - -Permission to use, copy, modify, and/or distribute this -software for any purpose with or without fee is hereby -granted, provided that the above copyright notice and this -permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL -WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO -EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, -WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE -USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/tuf-js/node_modules/@npmcli/fs/lib/common/get-options.js b/node_modules/tuf-js/node_modules/@npmcli/fs/lib/common/get-options.js deleted file mode 100644 index cb5982f79077a..0000000000000 --- a/node_modules/tuf-js/node_modules/@npmcli/fs/lib/common/get-options.js +++ /dev/null @@ -1,20 +0,0 @@ -// given an input that may or may not be an object, return an object that has -// a copy of every defined property listed in 'copy'. 
if the input is not an -// object, assign it to the property named by 'wrap' -const getOptions = (input, { copy, wrap }) => { - const result = {} - - if (input && typeof input === 'object') { - for (const prop of copy) { - if (input[prop] !== undefined) { - result[prop] = input[prop] - } - } - } else { - result[wrap] = input - } - - return result -} - -module.exports = getOptions diff --git a/node_modules/tuf-js/node_modules/@npmcli/fs/lib/common/node.js b/node_modules/tuf-js/node_modules/@npmcli/fs/lib/common/node.js deleted file mode 100644 index 4d13bc037359d..0000000000000 --- a/node_modules/tuf-js/node_modules/@npmcli/fs/lib/common/node.js +++ /dev/null @@ -1,9 +0,0 @@ -const semver = require('semver') - -const satisfies = (range) => { - return semver.satisfies(process.version, range, { includePrerelease: true }) -} - -module.exports = { - satisfies, -} diff --git a/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/LICENSE b/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/LICENSE deleted file mode 100644 index 93546dfb7655b..0000000000000 --- a/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -(The MIT License) - -Copyright (c) 2011-2017 JP Richardson - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files -(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, - merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS -OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, - ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/errors.js b/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/errors.js deleted file mode 100644 index 1cd1e05d0c533..0000000000000 --- a/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/errors.js +++ /dev/null @@ -1,129 +0,0 @@ -'use strict' -const { inspect } = require('util') - -// adapted from node's internal/errors -// https://github.com/nodejs/node/blob/c8a04049/lib/internal/errors.js - -// close copy of node's internal SystemError class. -class SystemError { - constructor (code, prefix, context) { - // XXX context.code is undefined in all constructors used in cp/polyfill - // that may be a bug copied from node, maybe the constructor should use - // `code` not `errno`? 
nodejs/node#41104 - let message = `${prefix}: ${context.syscall} returned ` + - `${context.code} (${context.message})` - - if (context.path !== undefined) { - message += ` ${context.path}` - } - if (context.dest !== undefined) { - message += ` => ${context.dest}` - } - - this.code = code - Object.defineProperties(this, { - name: { - value: 'SystemError', - enumerable: false, - writable: true, - configurable: true, - }, - message: { - value: message, - enumerable: false, - writable: true, - configurable: true, - }, - info: { - value: context, - enumerable: true, - configurable: true, - writable: false, - }, - errno: { - get () { - return context.errno - }, - set (value) { - context.errno = value - }, - enumerable: true, - configurable: true, - }, - syscall: { - get () { - return context.syscall - }, - set (value) { - context.syscall = value - }, - enumerable: true, - configurable: true, - }, - }) - - if (context.path !== undefined) { - Object.defineProperty(this, 'path', { - get () { - return context.path - }, - set (value) { - context.path = value - }, - enumerable: true, - configurable: true, - }) - } - - if (context.dest !== undefined) { - Object.defineProperty(this, 'dest', { - get () { - return context.dest - }, - set (value) { - context.dest = value - }, - enumerable: true, - configurable: true, - }) - } - } - - toString () { - return `${this.name} [${this.code}]: ${this.message}` - } - - [Symbol.for('nodejs.util.inspect.custom')] (_recurseTimes, ctx) { - return inspect(this, { - ...ctx, - getters: true, - customInspect: false, - }) - } -} - -function E (code, message) { - module.exports[code] = class NodeError extends SystemError { - constructor (ctx) { - super(code, message, ctx) - } - } -} - -E('ERR_FS_CP_DIR_TO_NON_DIR', 'Cannot overwrite directory with non-directory') -E('ERR_FS_CP_EEXIST', 'Target already exists') -E('ERR_FS_CP_EINVAL', 'Invalid src or dest') -E('ERR_FS_CP_FIFO_PIPE', 'Cannot copy a FIFO pipe') -E('ERR_FS_CP_NON_DIR_TO_DIR', 'Cannot overwrite non-directory with directory') -E('ERR_FS_CP_SOCKET', 'Cannot copy a socket file') -E('ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY', 'Cannot overwrite symlink in subdirectory of self') -E('ERR_FS_CP_UNKNOWN', 'Cannot copy an unknown file type') -E('ERR_FS_EISDIR', 'Path is a directory') - -module.exports.ERR_INVALID_ARG_TYPE = class ERR_INVALID_ARG_TYPE extends Error { - constructor (name, expected, actual) { - super() - this.code = 'ERR_INVALID_ARG_TYPE' - this.message = `The ${name} argument must be ${expected}. Received ${typeof actual}` - } -} diff --git a/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/index.js b/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/index.js deleted file mode 100644 index 972ce7aa12abe..0000000000000 --- a/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/index.js +++ /dev/null @@ -1,22 +0,0 @@ -const fs = require('fs/promises') -const getOptions = require('../common/get-options.js') -const node = require('../common/node.js') -const polyfill = require('./polyfill.js') - -// node 16.7.0 added fs.cp -const useNative = node.satisfies('>=16.7.0') - -const cp = async (src, dest, opts) => { - const options = getOptions(opts, { - copy: ['dereference', 'errorOnExist', 'filter', 'force', 'preserveTimestamps', 'recursive'], - }) - - // the polyfill is tested separately from this module, no need to hack - // process.version to try to trigger it just for coverage - // istanbul ignore next - return useNative - ? 
fs.cp(src, dest, options) - : polyfill(src, dest, options) -} - -module.exports = cp diff --git a/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/polyfill.js b/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/polyfill.js deleted file mode 100644 index 80eb10de97191..0000000000000 --- a/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/polyfill.js +++ /dev/null @@ -1,428 +0,0 @@ -// this file is a modified version of the code in node 17.2.0 -// which is, in turn, a modified version of the fs-extra module on npm -// node core changes: -// - Use of the assert module has been replaced with core's error system. -// - All code related to the glob dependency has been removed. -// - Bring your own custom fs module is not currently supported. -// - Some basic code cleanup. -// changes here: -// - remove all callback related code -// - drop sync support -// - change assertions back to non-internal methods (see options.js) -// - throws ENOTDIR when rmdir gets an ENOENT for a path that exists in Windows -'use strict' - -const { - ERR_FS_CP_DIR_TO_NON_DIR, - ERR_FS_CP_EEXIST, - ERR_FS_CP_EINVAL, - ERR_FS_CP_FIFO_PIPE, - ERR_FS_CP_NON_DIR_TO_DIR, - ERR_FS_CP_SOCKET, - ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY, - ERR_FS_CP_UNKNOWN, - ERR_FS_EISDIR, - ERR_INVALID_ARG_TYPE, -} = require('./errors.js') -const { - constants: { - errno: { - EEXIST, - EISDIR, - EINVAL, - ENOTDIR, - }, - }, -} = require('os') -const { - chmod, - copyFile, - lstat, - mkdir, - readdir, - readlink, - stat, - symlink, - unlink, - utimes, -} = require('fs/promises') -const { - dirname, - isAbsolute, - join, - parse, - resolve, - sep, - toNamespacedPath, -} = require('path') -const { fileURLToPath } = require('url') - -const defaultOptions = { - dereference: false, - errorOnExist: false, - filter: undefined, - force: true, - preserveTimestamps: false, - recursive: false, -} - -async function cp (src, dest, opts) { - if (opts != null && typeof opts !== 'object') { - throw new ERR_INVALID_ARG_TYPE('options', ['Object'], opts) - } - return cpFn( - toNamespacedPath(getValidatedPath(src)), - toNamespacedPath(getValidatedPath(dest)), - { ...defaultOptions, ...opts }) -} - -function getValidatedPath (fileURLOrPath) { - const path = fileURLOrPath != null && fileURLOrPath.href - && fileURLOrPath.origin - ? 
fileURLToPath(fileURLOrPath) - : fileURLOrPath - return path -} - -async function cpFn (src, dest, opts) { - // Warn about using preserveTimestamps on 32-bit node - // istanbul ignore next - if (opts.preserveTimestamps && process.arch === 'ia32') { - const warning = 'Using the preserveTimestamps option in 32-bit ' + - 'node is not recommended' - process.emitWarning(warning, 'TimestampPrecisionWarning') - } - const stats = await checkPaths(src, dest, opts) - const { srcStat, destStat } = stats - await checkParentPaths(src, srcStat, dest) - if (opts.filter) { - return handleFilter(checkParentDir, destStat, src, dest, opts) - } - return checkParentDir(destStat, src, dest, opts) -} - -async function checkPaths (src, dest, opts) { - const { 0: srcStat, 1: destStat } = await getStats(src, dest, opts) - if (destStat) { - if (areIdentical(srcStat, destStat)) { - throw new ERR_FS_CP_EINVAL({ - message: 'src and dest cannot be the same', - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - if (srcStat.isDirectory() && !destStat.isDirectory()) { - throw new ERR_FS_CP_DIR_TO_NON_DIR({ - message: `cannot overwrite directory ${src} ` + - `with non-directory ${dest}`, - path: dest, - syscall: 'cp', - errno: EISDIR, - }) - } - if (!srcStat.isDirectory() && destStat.isDirectory()) { - throw new ERR_FS_CP_NON_DIR_TO_DIR({ - message: `cannot overwrite non-directory ${src} ` + - `with directory ${dest}`, - path: dest, - syscall: 'cp', - errno: ENOTDIR, - }) - } - } - - if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { - throw new ERR_FS_CP_EINVAL({ - message: `cannot copy ${src} to a subdirectory of self ${dest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - return { srcStat, destStat } -} - -function areIdentical (srcStat, destStat) { - return destStat.ino && destStat.dev && destStat.ino === srcStat.ino && - destStat.dev === srcStat.dev -} - -function getStats (src, dest, opts) { - const statFunc = opts.dereference ? - (file) => stat(file, { bigint: true }) : - (file) => lstat(file, { bigint: true }) - return Promise.all([ - statFunc(src), - statFunc(dest).catch((err) => { - // istanbul ignore next: unsure how to cover. - if (err.code === 'ENOENT') { - return null - } - // istanbul ignore next: unsure how to cover. - throw err - }), - ]) -} - -async function checkParentDir (destStat, src, dest, opts) { - const destParent = dirname(dest) - const dirExists = await pathExists(destParent) - if (dirExists) { - return getStatsForCopy(destStat, src, dest, opts) - } - await mkdir(destParent, { recursive: true }) - return getStatsForCopy(destStat, src, dest, opts) -} - -function pathExists (dest) { - return stat(dest).then( - () => true, - // istanbul ignore next: not sure when this would occur - (err) => (err.code === 'ENOENT' ? false : Promise.reject(err))) -} - -// Recursively check if dest parent is a subdirectory of src. -// It works for all file types including symlinks since it -// checks the src and dest inodes. It starts from the deepest -// parent and stops once it reaches the src parent or the root path. 
-async function checkParentPaths (src, srcStat, dest) { - const srcParent = resolve(dirname(src)) - const destParent = resolve(dirname(dest)) - if (destParent === srcParent || destParent === parse(destParent).root) { - return - } - let destStat - try { - destStat = await stat(destParent, { bigint: true }) - } catch (err) { - // istanbul ignore else: not sure when this would occur - if (err.code === 'ENOENT') { - return - } - // istanbul ignore next: not sure when this would occur - throw err - } - if (areIdentical(srcStat, destStat)) { - throw new ERR_FS_CP_EINVAL({ - message: `cannot copy ${src} to a subdirectory of self ${dest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - return checkParentPaths(src, srcStat, destParent) -} - -const normalizePathToArray = (path) => - resolve(path).split(sep).filter(Boolean) - -// Return true if dest is a subdir of src, otherwise false. -// It only checks the path strings. -function isSrcSubdir (src, dest) { - const srcArr = normalizePathToArray(src) - const destArr = normalizePathToArray(dest) - return srcArr.every((cur, i) => destArr[i] === cur) -} - -async function handleFilter (onInclude, destStat, src, dest, opts, cb) { - const include = await opts.filter(src, dest) - if (include) { - return onInclude(destStat, src, dest, opts, cb) - } -} - -function startCopy (destStat, src, dest, opts) { - if (opts.filter) { - return handleFilter(getStatsForCopy, destStat, src, dest, opts) - } - return getStatsForCopy(destStat, src, dest, opts) -} - -async function getStatsForCopy (destStat, src, dest, opts) { - const statFn = opts.dereference ? stat : lstat - const srcStat = await statFn(src) - // istanbul ignore else: can't portably test FIFO - if (srcStat.isDirectory() && opts.recursive) { - return onDir(srcStat, destStat, src, dest, opts) - } else if (srcStat.isDirectory()) { - throw new ERR_FS_EISDIR({ - message: `${src} is a directory (not copied)`, - path: src, - syscall: 'cp', - errno: EINVAL, - }) - } else if (srcStat.isFile() || - srcStat.isCharacterDevice() || - srcStat.isBlockDevice()) { - return onFile(srcStat, destStat, src, dest, opts) - } else if (srcStat.isSymbolicLink()) { - return onLink(destStat, src, dest) - } else if (srcStat.isSocket()) { - throw new ERR_FS_CP_SOCKET({ - message: `cannot copy a socket file: ${dest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } else if (srcStat.isFIFO()) { - throw new ERR_FS_CP_FIFO_PIPE({ - message: `cannot copy a FIFO pipe: ${dest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - // istanbul ignore next: should be unreachable - throw new ERR_FS_CP_UNKNOWN({ - message: `cannot copy an unknown file type: ${dest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) -} - -function onFile (srcStat, destStat, src, dest, opts) { - if (!destStat) { - return _copyFile(srcStat, src, dest, opts) - } - return mayCopyFile(srcStat, src, dest, opts) -} - -async function mayCopyFile (srcStat, src, dest, opts) { - if (opts.force) { - await unlink(dest) - return _copyFile(srcStat, src, dest, opts) - } else if (opts.errorOnExist) { - throw new ERR_FS_CP_EEXIST({ - message: `${dest} already exists`, - path: dest, - syscall: 'cp', - errno: EEXIST, - }) - } -} - -async function _copyFile (srcStat, src, dest, opts) { - await copyFile(src, dest) - if (opts.preserveTimestamps) { - return handleTimestampsAndMode(srcStat.mode, src, dest) - } - return setDestMode(dest, srcStat.mode) -} - -async function handleTimestampsAndMode (srcMode, src, dest) { - // Make sure the file is writable before 
setting the timestamp - // otherwise open fails with EPERM when invoked with 'r+' - // (through utimes call) - if (fileIsNotWritable(srcMode)) { - await makeFileWritable(dest, srcMode) - return setDestTimestampsAndMode(srcMode, src, dest) - } - return setDestTimestampsAndMode(srcMode, src, dest) -} - -function fileIsNotWritable (srcMode) { - return (srcMode & 0o200) === 0 -} - -function makeFileWritable (dest, srcMode) { - return setDestMode(dest, srcMode | 0o200) -} - -async function setDestTimestampsAndMode (srcMode, src, dest) { - await setDestTimestamps(src, dest) - return setDestMode(dest, srcMode) -} - -function setDestMode (dest, srcMode) { - return chmod(dest, srcMode) -} - -async function setDestTimestamps (src, dest) { - // The initial srcStat.atime cannot be trusted - // because it is modified by the read(2) system call - // (See https://nodejs.org/api/fs.html#fs_stat_time_values) - const updatedSrcStat = await stat(src) - return utimes(dest, updatedSrcStat.atime, updatedSrcStat.mtime) -} - -function onDir (srcStat, destStat, src, dest, opts) { - if (!destStat) { - return mkDirAndCopy(srcStat.mode, src, dest, opts) - } - return copyDir(src, dest, opts) -} - -async function mkDirAndCopy (srcMode, src, dest, opts) { - await mkdir(dest) - await copyDir(src, dest, opts) - return setDestMode(dest, srcMode) -} - -async function copyDir (src, dest, opts) { - const dir = await readdir(src) - for (let i = 0; i < dir.length; i++) { - const item = dir[i] - const srcItem = join(src, item) - const destItem = join(dest, item) - const { destStat } = await checkPaths(srcItem, destItem, opts) - await startCopy(destStat, srcItem, destItem, opts) - } -} - -async function onLink (destStat, src, dest) { - let resolvedSrc = await readlink(src) - if (!isAbsolute(resolvedSrc)) { - resolvedSrc = resolve(dirname(src), resolvedSrc) - } - if (!destStat) { - return symlink(resolvedSrc, dest) - } - let resolvedDest - try { - resolvedDest = await readlink(dest) - } catch (err) { - // Dest exists and is a regular file or directory, - // Windows may throw UNKNOWN error. If dest already exists, - // fs throws error anyway, so no need to guard against it here. - // istanbul ignore next: can only test on windows - if (err.code === 'EINVAL' || err.code === 'UNKNOWN') { - return symlink(resolvedSrc, dest) - } - // istanbul ignore next: should not be possible - throw err - } - if (!isAbsolute(resolvedDest)) { - resolvedDest = resolve(dirname(dest), resolvedDest) - } - if (isSrcSubdir(resolvedSrc, resolvedDest)) { - throw new ERR_FS_CP_EINVAL({ - message: `cannot copy ${resolvedSrc} to a subdirectory of self ` + - `${resolvedDest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - // Do not copy if src is a subdir of dest since unlinking - // dest in this case would result in removing src contents - // and therefore a broken symlink would be created. 
- const srcStat = await stat(src) - if (srcStat.isDirectory() && isSrcSubdir(resolvedDest, resolvedSrc)) { - throw new ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY({ - message: `cannot overwrite ${resolvedDest} with ${resolvedSrc}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - return copyLink(resolvedSrc, dest) -} - -async function copyLink (resolvedSrc, dest) { - await unlink(dest) - return symlink(resolvedSrc, dest) -} - -module.exports = cp diff --git a/node_modules/tuf-js/node_modules/@npmcli/fs/lib/index.js b/node_modules/tuf-js/node_modules/@npmcli/fs/lib/index.js deleted file mode 100644 index 81c746304cc42..0000000000000 --- a/node_modules/tuf-js/node_modules/@npmcli/fs/lib/index.js +++ /dev/null @@ -1,13 +0,0 @@ -'use strict' - -const cp = require('./cp/index.js') -const withTempDir = require('./with-temp-dir.js') -const readdirScoped = require('./readdir-scoped.js') -const moveFile = require('./move-file.js') - -module.exports = { - cp, - withTempDir, - readdirScoped, - moveFile, -} diff --git a/node_modules/tuf-js/node_modules/@npmcli/fs/lib/move-file.js b/node_modules/tuf-js/node_modules/@npmcli/fs/lib/move-file.js deleted file mode 100644 index d56e06d384659..0000000000000 --- a/node_modules/tuf-js/node_modules/@npmcli/fs/lib/move-file.js +++ /dev/null @@ -1,78 +0,0 @@ -const { dirname, join, resolve, relative, isAbsolute } = require('path') -const fs = require('fs/promises') - -const pathExists = async path => { - try { - await fs.access(path) - return true - } catch (er) { - return er.code !== 'ENOENT' - } -} - -const moveFile = async (source, destination, options = {}, root = true, symlinks = []) => { - if (!source || !destination) { - throw new TypeError('`source` and `destination` file required') - } - - options = { - overwrite: true, - ...options, - } - - if (!options.overwrite && await pathExists(destination)) { - throw new Error(`The destination file exists: ${destination}`) - } - - await fs.mkdir(dirname(destination), { recursive: true }) - - try { - await fs.rename(source, destination) - } catch (error) { - if (error.code === 'EXDEV' || error.code === 'EPERM') { - const sourceStat = await fs.lstat(source) - if (sourceStat.isDirectory()) { - const files = await fs.readdir(source) - await Promise.all(files.map((file) => - moveFile(join(source, file), join(destination, file), options, false, symlinks) - )) - } else if (sourceStat.isSymbolicLink()) { - symlinks.push({ source, destination }) - } else { - await fs.copyFile(source, destination) - } - } else { - throw error - } - } - - if (root) { - await Promise.all(symlinks.map(async ({ source: symSource, destination: symDestination }) => { - let target = await fs.readlink(symSource) - // junction symlinks in windows will be absolute paths, so we need to - // make sure they point to the symlink destination - if (isAbsolute(target)) { - target = resolve(symDestination, relative(symSource, target)) - } - // try to determine what the actual file is so we can create the correct - // type of symlink in windows - let targetStat = 'file' - try { - targetStat = await fs.stat(resolve(dirname(symSource), target)) - if (targetStat.isDirectory()) { - targetStat = 'junction' - } - } catch { - // targetStat remains 'file' - } - await fs.symlink( - target, - symDestination, - targetStat - ) - })) - await fs.rm(source, { recursive: true, force: true }) - } -} - -module.exports = moveFile diff --git a/node_modules/tuf-js/node_modules/@npmcli/fs/lib/readdir-scoped.js b/node_modules/tuf-js/node_modules/@npmcli/fs/lib/readdir-scoped.js 
deleted file mode 100644 index cd601dfbe7486..0000000000000 --- a/node_modules/tuf-js/node_modules/@npmcli/fs/lib/readdir-scoped.js +++ /dev/null @@ -1,20 +0,0 @@ -const { readdir } = require('fs/promises') -const { join } = require('path') - -const readdirScoped = async (dir) => { - const results = [] - - for (const item of await readdir(dir)) { - if (item.startsWith('@')) { - for (const scopedItem of await readdir(join(dir, item))) { - results.push(join(item, scopedItem)) - } - } else { - results.push(item) - } - } - - return results -} - -module.exports = readdirScoped diff --git a/node_modules/tuf-js/node_modules/@npmcli/fs/lib/with-temp-dir.js b/node_modules/tuf-js/node_modules/@npmcli/fs/lib/with-temp-dir.js deleted file mode 100644 index 0738ac4f29e1b..0000000000000 --- a/node_modules/tuf-js/node_modules/@npmcli/fs/lib/with-temp-dir.js +++ /dev/null @@ -1,39 +0,0 @@ -const { join, sep } = require('path') - -const getOptions = require('./common/get-options.js') -const { mkdir, mkdtemp, rm } = require('fs/promises') - -// create a temp directory, ensure its permissions match its parent, then call -// the supplied function passing it the path to the directory. clean up after -// the function finishes, whether it throws or not -const withTempDir = async (root, fn, opts) => { - const options = getOptions(opts, { - copy: ['tmpPrefix'], - }) - // create the directory - await mkdir(root, { recursive: true }) - - const target = await mkdtemp(join(`${root}${sep}`, options.tmpPrefix || '')) - let err - let result - - try { - result = await fn(target) - } catch (_err) { - err = _err - } - - try { - await rm(target, { force: true, recursive: true }) - } catch { - // ignore errors - } - - if (err) { - throw err - } - - return result -} - -module.exports = withTempDir diff --git a/node_modules/tuf-js/node_modules/@npmcli/fs/package.json b/node_modules/tuf-js/node_modules/@npmcli/fs/package.json deleted file mode 100644 index 5261a11b78000..0000000000000 --- a/node_modules/tuf-js/node_modules/@npmcli/fs/package.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "name": "@npmcli/fs", - "version": "3.1.1", - "description": "filesystem utilities for the npm cli", - "main": "lib/index.js", - "files": [ - "bin/", - "lib/" - ], - "scripts": { - "snap": "tap", - "test": "tap", - "npmclilint": "npmcli-lint", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "lintfix": "npm run lint -- --fix", - "posttest": "npm run lint", - "postsnap": "npm run lintfix --", - "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/npm/fs.git" - }, - "keywords": [ - "npm", - "oss" - ], - "author": "GitHub Inc.", - "license": "ISC", - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", - "tap": "^16.0.1" - }, - "dependencies": { - "semver": "^7.3.5" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.22.0" - }, - "tap": { - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - } -} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/LICENSE b/node_modules/tuf-js/node_modules/@tufjs/models/LICENSE similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/LICENSE rename to node_modules/tuf-js/node_modules/@tufjs/models/LICENSE diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/base.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/base.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/base.js rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/base.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/delegations.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/delegations.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/delegations.js rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/delegations.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/error.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/error.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/error.js rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/error.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/file.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/file.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/file.js rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/file.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/index.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/index.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/index.js rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/index.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/key.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/key.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/key.js rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/key.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/metadata.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/metadata.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/metadata.js rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/metadata.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/role.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/role.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/role.js rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/role.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/root.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/root.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/root.js rename to 
node_modules/tuf-js/node_modules/@tufjs/models/dist/root.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/signature.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/signature.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/signature.js rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/signature.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/snapshot.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/snapshot.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/snapshot.js rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/snapshot.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/targets.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/targets.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/targets.js rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/targets.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/timestamp.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/timestamp.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/timestamp.js rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/timestamp.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/guard.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/guard.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/guard.js rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/guard.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/index.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/index.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/index.js rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/index.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/key.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/key.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/key.js rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/key.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/oid.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/oid.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/oid.js rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/oid.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/types.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/types.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/types.js rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/types.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/verify.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/verify.js similarity index 100% rename from 
node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/dist/utils/verify.js rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/verify.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/package.json b/node_modules/tuf-js/node_modules/@tufjs/models/package.json similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models/package.json rename to node_modules/tuf-js/node_modules/@tufjs/models/package.json diff --git a/node_modules/tuf-js/node_modules/cacache/LICENSE.md b/node_modules/tuf-js/node_modules/cacache/LICENSE.md deleted file mode 100644 index 8d28acf866d93..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/LICENSE.md +++ /dev/null @@ -1,16 +0,0 @@ -ISC License - -Copyright (c) npm, Inc. - -Permission to use, copy, modify, and/or distribute this software for -any purpose with or without fee is hereby granted, provided that the -above copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS -ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE -COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE -USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/tuf-js/node_modules/cacache/lib/content/path.js b/node_modules/tuf-js/node_modules/cacache/lib/content/path.js deleted file mode 100644 index ad5a76a4f73f2..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/lib/content/path.js +++ /dev/null @@ -1,29 +0,0 @@ -'use strict' - -const contentVer = require('../../package.json')['cache-version'].content -const hashToSegments = require('../util/hash-to-segments') -const path = require('path') -const ssri = require('ssri') - -// Current format of content file path: -// -// sha512-BaSE64Hex= -> -// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee -// -module.exports = contentPath - -function contentPath (cache, integrity) { - const sri = ssri.parse(integrity, { single: true }) - // contentPath is the *strongest* algo given - return path.join( - contentDir(cache), - sri.algorithm, - ...hashToSegments(sri.hexDigest()) - ) -} - -module.exports.contentDir = contentDir - -function contentDir (cache) { - return path.join(cache, `content-v${contentVer}`) -} diff --git a/node_modules/tuf-js/node_modules/cacache/lib/content/read.js b/node_modules/tuf-js/node_modules/cacache/lib/content/read.js deleted file mode 100644 index 5f6192c3cec56..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/lib/content/read.js +++ /dev/null @@ -1,165 +0,0 @@ -'use strict' - -const fs = require('fs/promises') -const fsm = require('fs-minipass') -const ssri = require('ssri') -const contentPath = require('./path') -const Pipeline = require('minipass-pipeline') - -module.exports = read - -const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024 -async function read (cache, integrity, opts = {}) { - const { size } = opts - const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { - // get size - const stat = size ? 
{ size } : await fs.stat(cpath) - return { stat, cpath, sri } - }) - - if (stat.size > MAX_SINGLE_READ_SIZE) { - return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() - } - - const data = await fs.readFile(cpath, { encoding: null }) - - if (stat.size !== data.length) { - throw sizeError(stat.size, data.length) - } - - if (!ssri.checkData(data, sri)) { - throw integrityError(sri, cpath) - } - - return data -} - -const readPipeline = (cpath, size, sri, stream) => { - stream.push( - new fsm.ReadStream(cpath, { - size, - readSize: MAX_SINGLE_READ_SIZE, - }), - ssri.integrityStream({ - integrity: sri, - size, - }) - ) - return stream -} - -module.exports.stream = readStream -module.exports.readStream = readStream - -function readStream (cache, integrity, opts = {}) { - const { size } = opts - const stream = new Pipeline() - // Set all this up to run on the stream and then just return the stream - Promise.resolve().then(async () => { - const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { - // get size - const stat = size ? { size } : await fs.stat(cpath) - return { stat, cpath, sri } - }) - - return readPipeline(cpath, stat.size, sri, stream) - }).catch(err => stream.emit('error', err)) - - return stream -} - -module.exports.copy = copy - -function copy (cache, integrity, dest) { - return withContentSri(cache, integrity, (cpath) => { - return fs.copyFile(cpath, dest) - }) -} - -module.exports.hasContent = hasContent - -async function hasContent (cache, integrity) { - if (!integrity) { - return false - } - - try { - return await withContentSri(cache, integrity, async (cpath, sri) => { - const stat = await fs.stat(cpath) - return { size: stat.size, sri, stat } - }) - } catch (err) { - if (err.code === 'ENOENT') { - return false - } - - if (err.code === 'EPERM') { - /* istanbul ignore else */ - if (process.platform !== 'win32') { - throw err - } else { - return false - } - } - } -} - -async function withContentSri (cache, integrity, fn) { - const sri = ssri.parse(integrity) - // If `integrity` has multiple entries, pick the first digest - // with available local data. 
- const algo = sri.pickAlgorithm() - const digests = sri[algo] - - if (digests.length <= 1) { - const cpath = contentPath(cache, digests[0]) - return fn(cpath, digests[0]) - } else { - // Can't use race here because a generic error can happen before - // a ENOENT error, and can happen before a valid result - const results = await Promise.all(digests.map(async (meta) => { - try { - return await withContentSri(cache, meta, fn) - } catch (err) { - if (err.code === 'ENOENT') { - return Object.assign( - new Error('No matching content found for ' + sri.toString()), - { code: 'ENOENT' } - ) - } - return err - } - })) - // Return the first non error if it is found - const result = results.find((r) => !(r instanceof Error)) - if (result) { - return result - } - - // Throw the No matching content found error - const enoentError = results.find((r) => r.code === 'ENOENT') - if (enoentError) { - throw enoentError - } - - // Throw generic error - throw results.find((r) => r instanceof Error) - } -} - -function sizeError (expected, found) { - /* eslint-disable-next-line max-len */ - const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) - err.expected = expected - err.found = found - err.code = 'EBADSIZE' - return err -} - -function integrityError (sri, path) { - const err = new Error(`Integrity verification failed for ${sri} (${path})`) - err.code = 'EINTEGRITY' - err.sri = sri - err.path = path - return err -} diff --git a/node_modules/tuf-js/node_modules/cacache/lib/content/rm.js b/node_modules/tuf-js/node_modules/cacache/lib/content/rm.js deleted file mode 100644 index ce58d679e4cb2..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/lib/content/rm.js +++ /dev/null @@ -1,18 +0,0 @@ -'use strict' - -const fs = require('fs/promises') -const contentPath = require('./path') -const { hasContent } = require('./read') - -module.exports = rm - -async function rm (cache, integrity) { - const content = await hasContent(cache, integrity) - // ~pretty~ sure we can't end up with a content lacking sri, but be safe - if (content && content.sri) { - await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true }) - return true - } else { - return false - } -} diff --git a/node_modules/tuf-js/node_modules/cacache/lib/content/write.js b/node_modules/tuf-js/node_modules/cacache/lib/content/write.js deleted file mode 100644 index e7187abca8788..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/lib/content/write.js +++ /dev/null @@ -1,206 +0,0 @@ -'use strict' - -const events = require('events') - -const contentPath = require('./path') -const fs = require('fs/promises') -const { moveFile } = require('@npmcli/fs') -const { Minipass } = require('minipass') -const Pipeline = require('minipass-pipeline') -const Flush = require('minipass-flush') -const path = require('path') -const ssri = require('ssri') -const uniqueFilename = require('unique-filename') -const fsm = require('fs-minipass') - -module.exports = write - -// Cache of move operations in process so we don't duplicate -const moveOperations = new Map() - -async function write (cache, data, opts = {}) { - const { algorithms, size, integrity } = opts - - if (typeof size === 'number' && data.length !== size) { - throw sizeError(size, data.length) - } - - const sri = ssri.fromData(data, algorithms ? 
{ algorithms } : {}) - if (integrity && !ssri.checkData(data, integrity, opts)) { - throw checksumError(integrity, sri) - } - - for (const algo in sri) { - const tmp = await makeTmp(cache, opts) - const hash = sri[algo].toString() - try { - await fs.writeFile(tmp.target, data, { flag: 'wx' }) - await moveToDestination(tmp, cache, hash, opts) - } finally { - if (!tmp.moved) { - await fs.rm(tmp.target, { recursive: true, force: true }) - } - } - } - return { integrity: sri, size: data.length } -} - -module.exports.stream = writeStream - -// writes proxied to the 'inputStream' that is passed to the Promise -// 'end' is deferred until content is handled. -class CacacheWriteStream extends Flush { - constructor (cache, opts) { - super() - this.opts = opts - this.cache = cache - this.inputStream = new Minipass() - this.inputStream.on('error', er => this.emit('error', er)) - this.inputStream.on('drain', () => this.emit('drain')) - this.handleContentP = null - } - - write (chunk, encoding, cb) { - if (!this.handleContentP) { - this.handleContentP = handleContent( - this.inputStream, - this.cache, - this.opts - ) - this.handleContentP.catch(error => this.emit('error', error)) - } - return this.inputStream.write(chunk, encoding, cb) - } - - flush (cb) { - this.inputStream.end(() => { - if (!this.handleContentP) { - const e = new Error('Cache input stream was empty') - e.code = 'ENODATA' - // empty streams are probably emitting end right away. - // defer this one tick by rejecting a promise on it. - return Promise.reject(e).catch(cb) - } - // eslint-disable-next-line promise/catch-or-return - this.handleContentP.then( - (res) => { - res.integrity && this.emit('integrity', res.integrity) - // eslint-disable-next-line promise/always-return - res.size !== null && this.emit('size', res.size) - cb() - }, - (er) => cb(er) - ) - }) - } -} - -function writeStream (cache, opts = {}) { - return new CacacheWriteStream(cache, opts) -} - -async function handleContent (inputStream, cache, opts) { - const tmp = await makeTmp(cache, opts) - try { - const res = await pipeToTmp(inputStream, cache, tmp.target, opts) - await moveToDestination( - tmp, - cache, - res.integrity, - opts - ) - return res - } finally { - if (!tmp.moved) { - await fs.rm(tmp.target, { recursive: true, force: true }) - } - } -} - -async function pipeToTmp (inputStream, cache, tmpTarget, opts) { - const outStream = new fsm.WriteStream(tmpTarget, { - flags: 'wx', - }) - - if (opts.integrityEmitter) { - // we need to create these all simultaneously since they can fire in any order - const [integrity, size] = await Promise.all([ - events.once(opts.integrityEmitter, 'integrity').then(res => res[0]), - events.once(opts.integrityEmitter, 'size').then(res => res[0]), - new Pipeline(inputStream, outStream).promise(), - ]) - return { integrity, size } - } - - let integrity - let size - const hashStream = ssri.integrityStream({ - integrity: opts.integrity, - algorithms: opts.algorithms, - size: opts.size, - }) - hashStream.on('integrity', i => { - integrity = i - }) - hashStream.on('size', s => { - size = s - }) - - const pipeline = new Pipeline(inputStream, hashStream, outStream) - await pipeline.promise() - return { integrity, size } -} - -async function makeTmp (cache, opts) { - const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) - await fs.mkdir(path.dirname(tmpTarget), { recursive: true }) - return { - target: tmpTarget, - moved: false, - } -} - -async function moveToDestination (tmp, cache, sri) { - const destination = 
contentPath(cache, sri) - const destDir = path.dirname(destination) - if (moveOperations.has(destination)) { - return moveOperations.get(destination) - } - moveOperations.set( - destination, - fs.mkdir(destDir, { recursive: true }) - .then(async () => { - await moveFile(tmp.target, destination, { overwrite: false }) - tmp.moved = true - return tmp.moved - }) - .catch(err => { - if (!err.message.startsWith('The destination file exists')) { - throw Object.assign(err, { code: 'EEXIST' }) - } - }).finally(() => { - moveOperations.delete(destination) - }) - - ) - return moveOperations.get(destination) -} - -function sizeError (expected, found) { - /* eslint-disable-next-line max-len */ - const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) - err.expected = expected - err.found = found - err.code = 'EBADSIZE' - return err -} - -function checksumError (expected, found) { - const err = new Error(`Integrity check failed: - Wanted: ${expected} - Found: ${found}`) - err.code = 'EINTEGRITY' - err.expected = expected - err.found = found - return err -} diff --git a/node_modules/tuf-js/node_modules/cacache/lib/entry-index.js b/node_modules/tuf-js/node_modules/cacache/lib/entry-index.js deleted file mode 100644 index 89c28f2f257d4..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/lib/entry-index.js +++ /dev/null @@ -1,336 +0,0 @@ -'use strict' - -const crypto = require('crypto') -const { - appendFile, - mkdir, - readFile, - readdir, - rm, - writeFile, -} = require('fs/promises') -const { Minipass } = require('minipass') -const path = require('path') -const ssri = require('ssri') -const uniqueFilename = require('unique-filename') - -const contentPath = require('./content/path') -const hashToSegments = require('./util/hash-to-segments') -const indexV = require('../package.json')['cache-version'].index -const { moveFile } = require('@npmcli/fs') - -const pMap = require('p-map') -const lsStreamConcurrency = 5 - -module.exports.NotFoundError = class NotFoundError extends Error { - constructor (cache, key) { - super(`No cache entry for ${key} found in ${cache}`) - this.code = 'ENOENT' - this.cache = cache - this.key = key - } -} - -module.exports.compact = compact - -async function compact (cache, key, matchFn, opts = {}) { - const bucket = bucketPath(cache, key) - const entries = await bucketEntries(bucket) - const newEntries = [] - // we loop backwards because the bottom-most result is the newest - // since we add new entries with appendFile - for (let i = entries.length - 1; i >= 0; --i) { - const entry = entries[i] - // a null integrity could mean either a delete was appended - // or the user has simply stored an index that does not map - // to any content. we determine if the user wants to keep the - // null integrity based on the validateEntry function passed in options. - // if the integrity is null and no validateEntry is provided, we break - // as we consider the null integrity to be a deletion of everything - // that came before it. 
- if (entry.integrity === null && !opts.validateEntry) { - break - } - - // if this entry is valid, and it is either the first entry or - // the newEntries array doesn't already include an entry that - // matches this one based on the provided matchFn, then we add - // it to the beginning of our list - if ((!opts.validateEntry || opts.validateEntry(entry) === true) && - (newEntries.length === 0 || - !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) { - newEntries.unshift(entry) - } - } - - const newIndex = '\n' + newEntries.map((entry) => { - const stringified = JSON.stringify(entry) - const hash = hashEntry(stringified) - return `${hash}\t${stringified}` - }).join('\n') - - const setup = async () => { - const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) - await mkdir(path.dirname(target), { recursive: true }) - return { - target, - moved: false, - } - } - - const teardown = async (tmp) => { - if (!tmp.moved) { - return rm(tmp.target, { recursive: true, force: true }) - } - } - - const write = async (tmp) => { - await writeFile(tmp.target, newIndex, { flag: 'wx' }) - await mkdir(path.dirname(bucket), { recursive: true }) - // we use @npmcli/move-file directly here because we - // want to overwrite the existing file - await moveFile(tmp.target, bucket) - tmp.moved = true - } - - // write the file atomically - const tmp = await setup() - try { - await write(tmp) - } finally { - await teardown(tmp) - } - - // we reverse the list we generated such that the newest - // entries come first in order to make looping through them easier - // the true passed to formatEntry tells it to keep null - // integrity values, if they made it this far it's because - // validateEntry returned true, and as such we should return it - return newEntries.reverse().map((entry) => formatEntry(cache, entry, true)) -} - -module.exports.insert = insert - -async function insert (cache, key, integrity, opts = {}) { - const { metadata, size, time } = opts - const bucket = bucketPath(cache, key) - const entry = { - key, - integrity: integrity && ssri.stringify(integrity), - time: time || Date.now(), - size, - metadata, - } - try { - await mkdir(path.dirname(bucket), { recursive: true }) - const stringified = JSON.stringify(entry) - // NOTE - Cleverness ahoy! - // - // This works because it's tremendously unlikely for an entry to corrupt - // another while still preserving the string length of the JSON in - // question. So, we just slap the length in there and verify it on read. - // - // Thanks to @isaacs for the whiteboarding session that ended up with - // this. 
- await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) - } catch (err) { - if (err.code === 'ENOENT') { - return undefined - } - - throw err - } - return formatEntry(cache, entry) -} - -module.exports.find = find - -async function find (cache, key) { - const bucket = bucketPath(cache, key) - try { - const entries = await bucketEntries(bucket) - return entries.reduce((latest, next) => { - if (next && next.key === key) { - return formatEntry(cache, next) - } else { - return latest - } - }, null) - } catch (err) { - if (err.code === 'ENOENT') { - return null - } else { - throw err - } - } -} - -module.exports.delete = del - -function del (cache, key, opts = {}) { - if (!opts.removeFully) { - return insert(cache, key, null, opts) - } - - const bucket = bucketPath(cache, key) - return rm(bucket, { recursive: true, force: true }) -} - -module.exports.lsStream = lsStream - -function lsStream (cache) { - const indexDir = bucketDir(cache) - const stream = new Minipass({ objectMode: true }) - - // Set all this up to run on the stream and then just return the stream - Promise.resolve().then(async () => { - const buckets = await readdirOrEmpty(indexDir) - await pMap(buckets, async (bucket) => { - const bucketPath = path.join(indexDir, bucket) - const subbuckets = await readdirOrEmpty(bucketPath) - await pMap(subbuckets, async (subbucket) => { - const subbucketPath = path.join(bucketPath, subbucket) - - // "/cachename//./*" - const subbucketEntries = await readdirOrEmpty(subbucketPath) - await pMap(subbucketEntries, async (entry) => { - const entryPath = path.join(subbucketPath, entry) - try { - const entries = await bucketEntries(entryPath) - // using a Map here prevents duplicate keys from showing up - // twice, I guess? - const reduced = entries.reduce((acc, entry) => { - acc.set(entry.key, entry) - return acc - }, new Map()) - // reduced is a map of key => entry - for (const entry of reduced.values()) { - const formatted = formatEntry(cache, entry) - if (formatted) { - stream.write(formatted) - } - } - } catch (err) { - if (err.code === 'ENOENT') { - return undefined - } - throw err - } - }, - { concurrency: lsStreamConcurrency }) - }, - { concurrency: lsStreamConcurrency }) - }, - { concurrency: lsStreamConcurrency }) - stream.end() - return stream - }).catch(err => stream.emit('error', err)) - - return stream -} - -module.exports.ls = ls - -async function ls (cache) { - const entries = await lsStream(cache).collect() - return entries.reduce((acc, xs) => { - acc[xs.key] = xs - return acc - }, {}) -} - -module.exports.bucketEntries = bucketEntries - -async function bucketEntries (bucket, filter) { - const data = await readFile(bucket, 'utf8') - return _bucketEntries(data, filter) -} - -function _bucketEntries (data) { - const entries = [] - data.split('\n').forEach((entry) => { - if (!entry) { - return - } - - const pieces = entry.split('\t') - if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { - // Hash is no good! Corruption or malice? Doesn't matter! 
- // EJECT EJECT - return - } - let obj - try { - obj = JSON.parse(pieces[1]) - } catch (_) { - // eslint-ignore-next-line no-empty-block - } - // coverage disabled here, no need to test with an entry that parses to something falsey - // istanbul ignore else - if (obj) { - entries.push(obj) - } - }) - return entries -} - -module.exports.bucketDir = bucketDir - -function bucketDir (cache) { - return path.join(cache, `index-v${indexV}`) -} - -module.exports.bucketPath = bucketPath - -function bucketPath (cache, key) { - const hashed = hashKey(key) - return path.join.apply( - path, - [bucketDir(cache)].concat(hashToSegments(hashed)) - ) -} - -module.exports.hashKey = hashKey - -function hashKey (key) { - return hash(key, 'sha256') -} - -module.exports.hashEntry = hashEntry - -function hashEntry (str) { - return hash(str, 'sha1') -} - -function hash (str, digest) { - return crypto - .createHash(digest) - .update(str) - .digest('hex') -} - -function formatEntry (cache, entry, keepAll) { - // Treat null digests as deletions. They'll shadow any previous entries. - if (!entry.integrity && !keepAll) { - return null - } - - return { - key: entry.key, - integrity: entry.integrity, - path: entry.integrity ? contentPath(cache, entry.integrity) : undefined, - size: entry.size, - time: entry.time, - metadata: entry.metadata, - } -} - -function readdirOrEmpty (dir) { - return readdir(dir).catch((err) => { - if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { - return [] - } - - throw err - }) -} diff --git a/node_modules/tuf-js/node_modules/cacache/lib/get.js b/node_modules/tuf-js/node_modules/cacache/lib/get.js deleted file mode 100644 index 80ec206c7ecaa..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/lib/get.js +++ /dev/null @@ -1,170 +0,0 @@ -'use strict' - -const Collect = require('minipass-collect') -const { Minipass } = require('minipass') -const Pipeline = require('minipass-pipeline') - -const index = require('./entry-index') -const memo = require('./memoization') -const read = require('./content/read') - -async function getData (cache, key, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return { - metadata: memoized.entry.metadata, - data: memoized.data, - integrity: memoized.entry.integrity, - size: memoized.entry.size, - } - } - - const entry = await index.find(cache, key, opts) - if (!entry) { - throw new index.NotFoundError(cache, key) - } - const data = await read(cache, entry.integrity, { integrity, size }) - if (memoize) { - memo.put(cache, entry, data, opts) - } - - return { - data, - metadata: entry.metadata, - size: entry.size, - integrity: entry.integrity, - } -} -module.exports = getData - -async function getDataByDigest (cache, key, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get.byDigest(cache, key, opts) - if (memoized && memoize !== false) { - return memoized - } - - const res = await read(cache, key, { integrity, size }) - if (memoize) { - memo.put.byDigest(cache, key, res, opts) - } - return res -} -module.exports.byDigest = getDataByDigest - -const getMemoizedStream = (memoized) => { - const stream = new Minipass() - stream.on('newListener', function (ev, cb) { - ev === 'metadata' && cb(memoized.entry.metadata) - ev === 'integrity' && cb(memoized.entry.integrity) - ev === 'size' && cb(memoized.entry.size) - }) - stream.end(memoized.data) - return stream -} - -function getStream (cache, key, opts = {}) { - const { memoize, size } 
= opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return getMemoizedStream(memoized) - } - - const stream = new Pipeline() - // Set all this up to run on the stream and then just return the stream - Promise.resolve().then(async () => { - const entry = await index.find(cache, key) - if (!entry) { - throw new index.NotFoundError(cache, key) - } - - stream.emit('metadata', entry.metadata) - stream.emit('integrity', entry.integrity) - stream.emit('size', entry.size) - stream.on('newListener', function (ev, cb) { - ev === 'metadata' && cb(entry.metadata) - ev === 'integrity' && cb(entry.integrity) - ev === 'size' && cb(entry.size) - }) - - const src = read.readStream( - cache, - entry.integrity, - { ...opts, size: typeof size !== 'number' ? entry.size : size } - ) - - if (memoize) { - const memoStream = new Collect.PassThrough() - memoStream.on('collect', data => memo.put(cache, entry, data, opts)) - stream.unshift(memoStream) - } - stream.unshift(src) - return stream - }).catch((err) => stream.emit('error', err)) - - return stream -} - -module.exports.stream = getStream - -function getStreamDigest (cache, integrity, opts = {}) { - const { memoize } = opts - const memoized = memo.get.byDigest(cache, integrity, opts) - if (memoized && memoize !== false) { - const stream = new Minipass() - stream.end(memoized) - return stream - } else { - const stream = read.readStream(cache, integrity, opts) - if (!memoize) { - return stream - } - - const memoStream = new Collect.PassThrough() - memoStream.on('collect', data => memo.put.byDigest( - cache, - integrity, - data, - opts - )) - return new Pipeline(stream, memoStream) - } -} - -module.exports.stream.byDigest = getStreamDigest - -function info (cache, key, opts = {}) { - const { memoize } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return Promise.resolve(memoized.entry) - } else { - return index.find(cache, key) - } -} -module.exports.info = info - -async function copy (cache, key, dest, opts = {}) { - const entry = await index.find(cache, key, opts) - if (!entry) { - throw new index.NotFoundError(cache, key) - } - await read.copy(cache, entry.integrity, dest, opts) - return { - metadata: entry.metadata, - size: entry.size, - integrity: entry.integrity, - } -} - -module.exports.copy = copy - -async function copyByDigest (cache, key, dest, opts = {}) { - await read.copy(cache, key, dest, opts) - return key -} - -module.exports.copy.byDigest = copyByDigest - -module.exports.hasContent = read.hasContent diff --git a/node_modules/tuf-js/node_modules/cacache/lib/index.js b/node_modules/tuf-js/node_modules/cacache/lib/index.js deleted file mode 100644 index c9b0da5f3a271..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/lib/index.js +++ /dev/null @@ -1,42 +0,0 @@ -'use strict' - -const get = require('./get.js') -const put = require('./put.js') -const rm = require('./rm.js') -const verify = require('./verify.js') -const { clearMemoized } = require('./memoization.js') -const tmp = require('./util/tmp.js') -const index = require('./entry-index.js') - -module.exports.index = {} -module.exports.index.compact = index.compact -module.exports.index.insert = index.insert - -module.exports.ls = index.ls -module.exports.ls.stream = index.lsStream - -module.exports.get = get -module.exports.get.byDigest = get.byDigest -module.exports.get.stream = get.stream -module.exports.get.stream.byDigest = get.stream.byDigest -module.exports.get.copy = get.copy 
-module.exports.get.copy.byDigest = get.copy.byDigest -module.exports.get.info = get.info -module.exports.get.hasContent = get.hasContent - -module.exports.put = put -module.exports.put.stream = put.stream - -module.exports.rm = rm.entry -module.exports.rm.all = rm.all -module.exports.rm.entry = module.exports.rm -module.exports.rm.content = rm.content - -module.exports.clearMemoized = clearMemoized - -module.exports.tmp = {} -module.exports.tmp.mkdir = tmp.mkdir -module.exports.tmp.withTmp = tmp.withTmp - -module.exports.verify = verify -module.exports.verify.lastRun = verify.lastRun diff --git a/node_modules/tuf-js/node_modules/cacache/lib/memoization.js b/node_modules/tuf-js/node_modules/cacache/lib/memoization.js deleted file mode 100644 index 2ecc60912e456..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/lib/memoization.js +++ /dev/null @@ -1,72 +0,0 @@ -'use strict' - -const { LRUCache } = require('lru-cache') - -const MEMOIZED = new LRUCache({ - max: 500, - maxSize: 50 * 1024 * 1024, // 50MB - ttl: 3 * 60 * 1000, // 3 minutes - sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length, -}) - -module.exports.clearMemoized = clearMemoized - -function clearMemoized () { - const old = {} - MEMOIZED.forEach((v, k) => { - old[k] = v - }) - MEMOIZED.clear() - return old -} - -module.exports.put = put - -function put (cache, entry, data, opts) { - pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data }) - putDigest(cache, entry.integrity, data, opts) -} - -module.exports.put.byDigest = putDigest - -function putDigest (cache, integrity, data, opts) { - pickMem(opts).set(`digest:${cache}:${integrity}`, data) -} - -module.exports.get = get - -function get (cache, key, opts) { - return pickMem(opts).get(`key:${cache}:${key}`) -} - -module.exports.get.byDigest = getDigest - -function getDigest (cache, integrity, opts) { - return pickMem(opts).get(`digest:${cache}:${integrity}`) -} - -class ObjProxy { - constructor (obj) { - this.obj = obj - } - - get (key) { - return this.obj[key] - } - - set (key, val) { - this.obj[key] = val - } -} - -function pickMem (opts) { - if (!opts || !opts.memoize) { - return MEMOIZED - } else if (opts.memoize.get && opts.memoize.set) { - return opts.memoize - } else if (typeof opts.memoize === 'object') { - return new ObjProxy(opts.memoize) - } else { - return MEMOIZED - } -} diff --git a/node_modules/tuf-js/node_modules/cacache/lib/put.js b/node_modules/tuf-js/node_modules/cacache/lib/put.js deleted file mode 100644 index 9fc932d5f6dec..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/lib/put.js +++ /dev/null @@ -1,80 +0,0 @@ -'use strict' - -const index = require('./entry-index') -const memo = require('./memoization') -const write = require('./content/write') -const Flush = require('minipass-flush') -const { PassThrough } = require('minipass-collect') -const Pipeline = require('minipass-pipeline') - -const putOpts = (opts) => ({ - algorithms: ['sha512'], - ...opts, -}) - -module.exports = putData - -async function putData (cache, key, data, opts = {}) { - const { memoize } = opts - opts = putOpts(opts) - const res = await write(cache, data, opts) - const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size }) - if (memoize) { - memo.put(cache, entry, data, opts) - } - - return res.integrity -} - -module.exports.stream = putStream - -function putStream (cache, key, opts = {}) { - const { memoize } = opts - opts = putOpts(opts) - let integrity - let size - let error - - let 
memoData - const pipeline = new Pipeline() - // first item in the pipeline is the memoizer, because we need - // that to end first and get the collected data. - if (memoize) { - const memoizer = new PassThrough().on('collect', data => { - memoData = data - }) - pipeline.push(memoizer) - } - - // contentStream is a write-only, not a passthrough - // no data comes out of it. - const contentStream = write.stream(cache, opts) - .on('integrity', (int) => { - integrity = int - }) - .on('size', (s) => { - size = s - }) - .on('error', (err) => { - error = err - }) - - pipeline.push(contentStream) - - // last but not least, we write the index and emit hash and size, - // and memoize if we're doing that - pipeline.push(new Flush({ - async flush () { - if (!error) { - const entry = await index.insert(cache, key, integrity, { ...opts, size }) - if (memoize && memoData) { - memo.put(cache, entry, memoData, opts) - } - pipeline.emit('integrity', integrity) - pipeline.emit('size', size) - } - }, - })) - - return pipeline -} diff --git a/node_modules/tuf-js/node_modules/cacache/lib/rm.js b/node_modules/tuf-js/node_modules/cacache/lib/rm.js deleted file mode 100644 index a94760c7cf243..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/lib/rm.js +++ /dev/null @@ -1,31 +0,0 @@ -'use strict' - -const { rm } = require('fs/promises') -const glob = require('./util/glob.js') -const index = require('./entry-index') -const memo = require('./memoization') -const path = require('path') -const rmContent = require('./content/rm') - -module.exports = entry -module.exports.entry = entry - -function entry (cache, key, opts) { - memo.clearMemoized() - return index.delete(cache, key, opts) -} - -module.exports.content = content - -function content (cache, integrity) { - memo.clearMemoized() - return rmContent(cache, integrity) -} - -module.exports.all = all - -async function all (cache) { - memo.clearMemoized() - const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true }) - return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true }))) -} diff --git a/node_modules/tuf-js/node_modules/cacache/lib/util/glob.js b/node_modules/tuf-js/node_modules/cacache/lib/util/glob.js deleted file mode 100644 index 8500c1c16a429..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/lib/util/glob.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict' - -const { glob } = require('glob') -const path = require('path') - -const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep) -module.exports = (path, options) => glob(globify(path), options) diff --git a/node_modules/tuf-js/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/tuf-js/node_modules/cacache/lib/util/hash-to-segments.js deleted file mode 100644 index 445599b503808..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/lib/util/hash-to-segments.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict' - -module.exports = hashToSegments - -function hashToSegments (hash) { - return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)] -} diff --git a/node_modules/tuf-js/node_modules/cacache/lib/util/tmp.js b/node_modules/tuf-js/node_modules/cacache/lib/util/tmp.js deleted file mode 100644 index 0bf5302136ebe..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/lib/util/tmp.js +++ /dev/null @@ -1,26 +0,0 @@ -'use strict' - -const { withTempDir } = require('@npmcli/fs') -const fs = require('fs/promises') -const path = require('path') - -module.exports.mkdir = mktmpdir - -async function 
mktmpdir (cache, opts = {}) { - const { tmpPrefix } = opts - const tmpDir = path.join(cache, 'tmp') - await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) - // do not use path.join(), it drops the trailing / if tmpPrefix is unset - const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` - return fs.mkdtemp(target, { owner: 'inherit' }) -} - -module.exports.withTmp = withTmp - -function withTmp (cache, opts, cb) { - if (!cb) { - cb = opts - opts = {} - } - return withTempDir(path.join(cache, 'tmp'), cb, opts) -} diff --git a/node_modules/tuf-js/node_modules/cacache/lib/verify.js b/node_modules/tuf-js/node_modules/cacache/lib/verify.js deleted file mode 100644 index d7423da1295b6..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/lib/verify.js +++ /dev/null @@ -1,257 +0,0 @@ -'use strict' - -const { - mkdir, - readFile, - rm, - stat, - truncate, - writeFile, -} = require('fs/promises') -const pMap = require('p-map') -const contentPath = require('./content/path') -const fsm = require('fs-minipass') -const glob = require('./util/glob.js') -const index = require('./entry-index') -const path = require('path') -const ssri = require('ssri') - -const hasOwnProperty = (obj, key) => - Object.prototype.hasOwnProperty.call(obj, key) - -const verifyOpts = (opts) => ({ - concurrency: 20, - log: { silly () {} }, - ...opts, -}) - -module.exports = verify - -async function verify (cache, opts) { - opts = verifyOpts(opts) - opts.log.silly('verify', 'verifying cache at', cache) - - const steps = [ - markStartTime, - fixPerms, - garbageCollect, - rebuildIndex, - cleanTmp, - writeVerifile, - markEndTime, - ] - - const stats = {} - for (const step of steps) { - const label = step.name - const start = new Date() - const s = await step(cache, opts) - if (s) { - Object.keys(s).forEach((k) => { - stats[k] = s[k] - }) - } - const end = new Date() - if (!stats.runTime) { - stats.runTime = {} - } - stats.runTime[label] = end - start - } - stats.runTime.total = stats.endTime - stats.startTime - opts.log.silly( - 'verify', - 'verification finished for', - cache, - 'in', - `${stats.runTime.total}ms` - ) - return stats -} - -async function markStartTime () { - return { startTime: new Date() } -} - -async function markEndTime () { - return { endTime: new Date() } -} - -async function fixPerms (cache, opts) { - opts.log.silly('verify', 'fixing cache permissions') - await mkdir(cache, { recursive: true }) - return null -} - -// Implements a naive mark-and-sweep tracing garbage collector. -// -// The algorithm is basically as follows: -// 1. Read (and filter) all index entries ("pointers") -// 2. Mark each integrity value as "live" -// 3. Read entire filesystem tree in `content-vX/` dir -// 4. If content is live, verify its checksum and delete it if it fails -// 5. If content is not marked as live, rm it. 
-// -async function garbageCollect (cache, opts) { - opts.log.silly('verify', 'garbage collecting content') - const indexStream = index.lsStream(cache) - const liveContent = new Set() - indexStream.on('data', (entry) => { - if (opts.filter && !opts.filter(entry)) { - return - } - - // integrity is stringified, re-parse it so we can get each hash - const integrity = ssri.parse(entry.integrity) - for (const algo in integrity) { - liveContent.add(integrity[algo].toString()) - } - }) - await new Promise((resolve, reject) => { - indexStream.on('end', resolve).on('error', reject) - }) - const contentDir = contentPath.contentDir(cache) - const files = await glob(path.join(contentDir, '**'), { - follow: false, - nodir: true, - nosort: true, - }) - const stats = { - verifiedContent: 0, - reclaimedCount: 0, - reclaimedSize: 0, - badContentCount: 0, - keptSize: 0, - } - await pMap( - files, - async (f) => { - const split = f.split(/[/\\]/) - const digest = split.slice(split.length - 3).join('') - const algo = split[split.length - 4] - const integrity = ssri.fromHex(digest, algo) - if (liveContent.has(integrity.toString())) { - const info = await verifyContent(f, integrity) - if (!info.valid) { - stats.reclaimedCount++ - stats.badContentCount++ - stats.reclaimedSize += info.size - } else { - stats.verifiedContent++ - stats.keptSize += info.size - } - } else { - // No entries refer to this content. We can delete. - stats.reclaimedCount++ - const s = await stat(f) - await rm(f, { recursive: true, force: true }) - stats.reclaimedSize += s.size - } - return stats - }, - { concurrency: opts.concurrency } - ) - return stats -} - -async function verifyContent (filepath, sri) { - const contentInfo = {} - try { - const { size } = await stat(filepath) - contentInfo.size = size - contentInfo.valid = true - await ssri.checkStream(new fsm.ReadStream(filepath), sri) - } catch (err) { - if (err.code === 'ENOENT') { - return { size: 0, valid: false } - } - if (err.code !== 'EINTEGRITY') { - throw err - } - - await rm(filepath, { recursive: true, force: true }) - contentInfo.valid = false - } - return contentInfo -} - -async function rebuildIndex (cache, opts) { - opts.log.silly('verify', 'rebuilding index') - const entries = await index.ls(cache) - const stats = { - missingContent: 0, - rejectedEntries: 0, - totalEntries: 0, - } - const buckets = {} - for (const k in entries) { - /* istanbul ignore else */ - if (hasOwnProperty(entries, k)) { - const hashed = index.hashKey(k) - const entry = entries[k] - const excluded = opts.filter && !opts.filter(entry) - excluded && stats.rejectedEntries++ - if (buckets[hashed] && !excluded) { - buckets[hashed].push(entry) - } else if (buckets[hashed] && excluded) { - // skip - } else if (excluded) { - buckets[hashed] = [] - buckets[hashed]._path = index.bucketPath(cache, k) - } else { - buckets[hashed] = [entry] - buckets[hashed]._path = index.bucketPath(cache, k) - } - } - } - await pMap( - Object.keys(buckets), - (key) => { - return rebuildBucket(cache, buckets[key], stats, opts) - }, - { concurrency: opts.concurrency } - ) - return stats -} - -async function rebuildBucket (cache, bucket, stats) { - await truncate(bucket._path) - // This needs to be serialized because cacache explicitly - // lets very racy bucket conflicts clobber each other. 
- for (const entry of bucket) { - const content = contentPath(cache, entry.integrity) - try { - await stat(content) - await index.insert(cache, entry.key, entry.integrity, { - metadata: entry.metadata, - size: entry.size, - time: entry.time, - }) - stats.totalEntries++ - } catch (err) { - if (err.code === 'ENOENT') { - stats.rejectedEntries++ - stats.missingContent++ - } else { - throw err - } - } - } -} - -function cleanTmp (cache, opts) { - opts.log.silly('verify', 'cleaning tmp directory') - return rm(path.join(cache, 'tmp'), { recursive: true, force: true }) -} - -async function writeVerifile (cache, opts) { - const verifile = path.join(cache, '_lastverified') - opts.log.silly('verify', 'writing verifile to ' + verifile) - return writeFile(verifile, `${Date.now()}`) -} - -module.exports.lastRun = lastRun - -async function lastRun (cache) { - const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' }) - return new Date(+data) -} diff --git a/node_modules/tuf-js/node_modules/cacache/package.json b/node_modules/tuf-js/node_modules/cacache/package.json deleted file mode 100644 index 6e6219158ed75..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/package.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "name": "cacache", - "version": "18.0.4", - "cache-version": { - "content": "2", - "index": "5" - }, - "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.", - "main": "lib/index.js", - "files": [ - "bin/", - "lib/" - ], - "scripts": { - "test": "tap", - "snap": "tap", - "coverage": "tap", - "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "npmclilint": "npmcli-lint", - "lintfix": "npm run lint -- --fix", - "postsnap": "npm run lintfix --", - "postlint": "template-oss-check", - "posttest": "npm run lint", - "template-oss-apply": "template-oss-apply --force" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/npm/cacache.git" - }, - "keywords": [ - "cache", - "caching", - "content-addressable", - "sri", - "sri hash", - "subresource integrity", - "cache", - "storage", - "store", - "file store", - "filesystem", - "disk cache", - "disk storage" - ], - "license": "ISC", - "dependencies": { - "@npmcli/fs": "^3.1.0", - "fs-minipass": "^3.0.0", - "glob": "^10.2.2", - "lru-cache": "^10.0.1", - "minipass": "^7.0.3", - "minipass-collect": "^2.0.1", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "p-map": "^4.0.0", - "ssri": "^10.0.0", - "tar": "^6.1.11", - "unique-filename": "^3.0.0" - }, - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", - "tap": "^16.0.0" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "windowsCI": false, - "version": "4.22.0", - "publish": "true" - }, - "author": "GitHub Inc.", - "tap": { - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - } -} diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/LICENSE b/node_modules/tuf-js/node_modules/make-fetch-happen/LICENSE deleted file mode 100644 index 1808eb2844231..0000000000000 --- a/node_modules/tuf-js/node_modules/make-fetch-happen/LICENSE +++ /dev/null @@ -1,16 +0,0 @@ -ISC License - -Copyright 2017-2022 (c) npm, Inc. 
- -Permission to use, copy, modify, and/or distribute this software for -any purpose with or without fee is hereby granted, provided that the -above copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS -ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE -COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE -USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/entry.js deleted file mode 100644 index bfcfacbcc95e1..0000000000000 --- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/entry.js +++ /dev/null @@ -1,471 +0,0 @@ -const { Request, Response } = require('minipass-fetch') -const { Minipass } = require('minipass') -const MinipassFlush = require('minipass-flush') -const cacache = require('cacache') -const url = require('url') - -const CachingMinipassPipeline = require('../pipeline.js') -const CachePolicy = require('./policy.js') -const cacheKey = require('./key.js') -const remote = require('../remote.js') - -const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop) - -// allow list for request headers that will be written to the cache index -// note: we will also store any request headers -// that are named in a response's vary header -const KEEP_REQUEST_HEADERS = [ - 'accept-charset', - 'accept-encoding', - 'accept-language', - 'accept', - 'cache-control', -] - -// allow list for response headers that will be written to the cache index -// note: we must not store the real response's age header, or when we load -// a cache policy based on the metadata it will think the cached response -// is always stale -const KEEP_RESPONSE_HEADERS = [ - 'cache-control', - 'content-encoding', - 'content-language', - 'content-type', - 'date', - 'etag', - 'expires', - 'last-modified', - 'link', - 'location', - 'pragma', - 'vary', -] - -// return an object containing all metadata to be written to the index -const getMetadata = (request, response, options) => { - const metadata = { - time: Date.now(), - url: request.url, - reqHeaders: {}, - resHeaders: {}, - - // options on which we must match the request and vary the response - options: { - compress: options.compress != null ? options.compress : request.compress, - }, - } - - // only save the status if it's not a 200 or 304 - if (response.status !== 200 && response.status !== 304) { - metadata.status = response.status - } - - for (const name of KEEP_REQUEST_HEADERS) { - if (request.headers.has(name)) { - metadata.reqHeaders[name] = request.headers.get(name) - } - } - - // if the request's host header differs from the host in the url - // we need to keep it, otherwise it's just noise and we ignore it - const host = request.headers.get('host') - const parsedUrl = new url.URL(request.url) - if (host && parsedUrl.host !== host) { - metadata.reqHeaders.host = host - } - - // if the response has a vary header, make sure - // we store the relevant request headers too - if (response.headers.has('vary')) { - const vary = response.headers.get('vary') - // a vary of "*" means every header causes a different response. 
- // in that scenario, we do not include any additional headers - // as the freshness check will always fail anyway and we don't - // want to bloat the cache indexes - if (vary !== '*') { - // copy any other request headers that will vary the response - const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/) - for (const name of varyHeaders) { - if (request.headers.has(name)) { - metadata.reqHeaders[name] = request.headers.get(name) - } - } - } - } - - for (const name of KEEP_RESPONSE_HEADERS) { - if (response.headers.has(name)) { - metadata.resHeaders[name] = response.headers.get(name) - } - } - - for (const name of options.cacheAdditionalHeaders) { - if (response.headers.has(name)) { - metadata.resHeaders[name] = response.headers.get(name) - } - } - - return metadata -} - -// symbols used to hide objects that may be lazily evaluated in a getter -const _request = Symbol('request') -const _response = Symbol('response') -const _policy = Symbol('policy') - -class CacheEntry { - constructor ({ entry, request, response, options }) { - if (entry) { - this.key = entry.key - this.entry = entry - // previous versions of this module didn't write an explicit timestamp in - // the metadata, so fall back to the entry's timestamp. we can't use the - // entry timestamp to determine staleness because cacache will update it - // when it verifies its data - this.entry.metadata.time = this.entry.metadata.time || this.entry.time - } else { - this.key = cacheKey(request) - } - - this.options = options - - // these properties are behind getters that lazily evaluate - this[_request] = request - this[_response] = response - this[_policy] = null - } - - // returns a CacheEntry instance that satisfies the given request - // or undefined if no existing entry satisfies - static async find (request, options) { - try { - // compacts the index and returns an array of unique entries - var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => { - const entryA = new CacheEntry({ entry: A, options }) - const entryB = new CacheEntry({ entry: B, options }) - return entryA.policy.satisfies(entryB.request) - }, { - validateEntry: (entry) => { - // clean out entries with a buggy content-encoding value - if (entry.metadata && - entry.metadata.resHeaders && - entry.metadata.resHeaders['content-encoding'] === null) { - return false - } - - // if an integrity is null, it needs to have a status specified - if (entry.integrity === null) { - return !!(entry.metadata && entry.metadata.status) - } - - return true - }, - }) - } catch (err) { - // if the compact request fails, ignore the error and return - return - } - - // a cache mode of 'reload' means to behave as though we have no cache - // on the way to the network. return undefined to allow cacheFetch to - // create a brand new request no matter what. 
- if (options.cache === 'reload') { - return - } - - // find the specific entry that satisfies the request - let match - for (const entry of matches) { - const _entry = new CacheEntry({ - entry, - options, - }) - - if (_entry.policy.satisfies(request)) { - match = _entry - break - } - } - - return match - } - - // if the user made a PUT/POST/PATCH then we invalidate our - // cache for the same url by deleting the index entirely - static async invalidate (request, options) { - const key = cacheKey(request) - try { - await cacache.rm.entry(options.cachePath, key, { removeFully: true }) - } catch (err) { - // ignore errors - } - } - - get request () { - if (!this[_request]) { - this[_request] = new Request(this.entry.metadata.url, { - method: 'GET', - headers: this.entry.metadata.reqHeaders, - ...this.entry.metadata.options, - }) - } - - return this[_request] - } - - get response () { - if (!this[_response]) { - this[_response] = new Response(null, { - url: this.entry.metadata.url, - counter: this.options.counter, - status: this.entry.metadata.status || 200, - headers: { - ...this.entry.metadata.resHeaders, - 'content-length': this.entry.size, - }, - }) - } - - return this[_response] - } - - get policy () { - if (!this[_policy]) { - this[_policy] = new CachePolicy({ - entry: this.entry, - request: this.request, - response: this.response, - options: this.options, - }) - } - - return this[_policy] - } - - // wraps the response in a pipeline that stores the data - // in the cache while the user consumes it - async store (status) { - // if we got a status other than 200, 301, or 308, - // or the CachePolicy forbid storage, append the - // cache status header and return it untouched - if ( - this.request.method !== 'GET' || - ![200, 301, 308].includes(this.response.status) || - !this.policy.storable() - ) { - this.response.headers.set('x-local-cache-status', 'skip') - return this.response - } - - const size = this.response.headers.get('content-length') - const cacheOpts = { - algorithms: this.options.algorithms, - metadata: getMetadata(this.request, this.response, this.options), - size, - integrity: this.options.integrity, - integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body, - } - - let body = null - // we only set a body if the status is a 200, redirects are - // stored as metadata only - if (this.response.status === 200) { - let cacheWriteResolve, cacheWriteReject - const cacheWritePromise = new Promise((resolve, reject) => { - cacheWriteResolve = resolve - cacheWriteReject = reject - }).catch((err) => { - body.emit('error', err) - }) - - body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({ - flush () { - return cacheWritePromise - }, - })) - // this is always true since if we aren't reusing the one from the remote fetch, we - // are using the one from cacache - body.hasIntegrityEmitter = true - - const onResume = () => { - const tee = new Minipass() - const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts) - // re-emit the integrity and size events on our new response body so they can be reused - cacheStream.on('integrity', i => body.emit('integrity', i)) - cacheStream.on('size', s => body.emit('size', s)) - // stick a flag on here so downstream users will know if they can expect integrity events - tee.pipe(cacheStream) - // TODO if the cache write fails, log a warning but return the response anyway - // eslint-disable-next-line promise/catch-or-return - cacheStream.promise().then(cacheWriteResolve, 
cacheWriteReject) - body.unshift(tee) - body.unshift(this.response.body) - } - - body.once('resume', onResume) - body.once('end', () => body.removeListener('resume', onResume)) - } else { - await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts) - } - - // note: we do not set the x-local-cache-hash header because we do not know - // the hash value until after the write to the cache completes, which doesn't - // happen until after the response has been sent and it's too late to write - // the header anyway - this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) - this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) - this.response.headers.set('x-local-cache-mode', 'stream') - this.response.headers.set('x-local-cache-status', status) - this.response.headers.set('x-local-cache-time', new Date().toISOString()) - const newResponse = new Response(body, { - url: this.response.url, - status: this.response.status, - headers: this.response.headers, - counter: this.options.counter, - }) - return newResponse - } - - // use the cached data to create a response and return it - async respond (method, options, status) { - let response - if (method === 'HEAD' || [301, 308].includes(this.response.status)) { - // if the request is a HEAD, or the response is a redirect, - // then the metadata in the entry already includes everything - // we need to build a response - response = this.response - } else { - // we're responding with a full cached response, so create a body - // that reads from cacache and attach it to a new Response - const body = new Minipass() - const headers = { ...this.policy.responseHeaders() } - - const onResume = () => { - const cacheStream = cacache.get.stream.byDigest( - this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } - ) - cacheStream.on('error', async (err) => { - cacheStream.pause() - if (err.code === 'EINTEGRITY') { - await cacache.rm.content( - this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } - ) - } - if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') { - await CacheEntry.invalidate(this.request, this.options) - } - body.emit('error', err) - cacheStream.resume() - }) - // emit the integrity and size events based on our metadata so we're consistent - body.emit('integrity', this.entry.integrity) - body.emit('size', Number(headers['content-length'])) - cacheStream.pipe(body) - } - - body.once('resume', onResume) - body.once('end', () => body.removeListener('resume', onResume)) - response = new Response(body, { - url: this.entry.metadata.url, - counter: options.counter, - status: 200, - headers, - }) - } - - response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) - response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity)) - response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) - response.headers.set('x-local-cache-mode', 'stream') - response.headers.set('x-local-cache-status', status) - response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString()) - return response - } - - // use the provided request along with this cache entry to - // revalidate the stored response. 
returns a response, either - // from the cache or from the update - async revalidate (request, options) { - const revalidateRequest = new Request(request, { - headers: this.policy.revalidationHeaders(request), - }) - - try { - // NOTE: be sure to remove the headers property from the - // user supplied options, since we have already defined - // them on the new request object. if they're still in the - // options then those will overwrite the ones from the policy - var response = await remote(revalidateRequest, { - ...options, - headers: undefined, - }) - } catch (err) { - // if the network fetch fails, return the stale - // cached response unless it has a cache-control - // of 'must-revalidate' - if (!this.policy.mustRevalidate) { - return this.respond(request.method, options, 'stale') - } - - throw err - } - - if (this.policy.revalidated(revalidateRequest, response)) { - // we got a 304, write a new index to the cache and respond from cache - const metadata = getMetadata(request, response, options) - // 304 responses do not include headers that are specific to the response data - // since they do not include a body, so we copy values for headers that were - // in the old cache entry to the new one, if the new metadata does not already - // include that header - for (const name of KEEP_RESPONSE_HEADERS) { - if ( - !hasOwnProperty(metadata.resHeaders, name) && - hasOwnProperty(this.entry.metadata.resHeaders, name) - ) { - metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] - } - } - - for (const name of options.cacheAdditionalHeaders) { - const inMeta = hasOwnProperty(metadata.resHeaders, name) - const inEntry = hasOwnProperty(this.entry.metadata.resHeaders, name) - const inPolicy = hasOwnProperty(this.policy.response.headers, name) - - // if the header is in the existing entry, but it is not in the metadata - // then we need to write it to the metadata as this will refresh the on-disk cache - if (!inMeta && inEntry) { - metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] - } - // if the header is in the metadata, but not in the policy, then we need to set - // it in the policy so that it's included in the immediate response. 
future - // responses will load a new cache entry, so we don't need to change that - if (!inPolicy && inMeta) { - this.policy.response.headers[name] = metadata.resHeaders[name] - } - } - - try { - await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, { - size: this.entry.size, - metadata, - }) - } catch (err) { - // if updating the cache index fails, we ignore it and - // respond anyway - } - return this.respond(request.method, options, 'revalidated') - } - - // if we got a modified response, create a new entry based on it - const newEntry = new CacheEntry({ - request, - response, - options, - }) - - // respond with the new entry while writing it to the cache - return newEntry.store('updated') - } -} - -module.exports = CacheEntry diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/errors.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/errors.js deleted file mode 100644 index 67a66573bebe6..0000000000000 --- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/errors.js +++ /dev/null @@ -1,11 +0,0 @@ -class NotCachedError extends Error { - constructor (url) { - /* eslint-disable-next-line max-len */ - super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`) - this.code = 'ENOTCACHED' - } -} - -module.exports = { - NotCachedError, -} diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/index.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/index.js deleted file mode 100644 index 0de49d23fb933..0000000000000 --- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/index.js +++ /dev/null @@ -1,49 +0,0 @@ -const { NotCachedError } = require('./errors.js') -const CacheEntry = require('./entry.js') -const remote = require('../remote.js') - -// do whatever is necessary to get a Response and return it -const cacheFetch = async (request, options) => { - // try to find a cached entry that satisfies this request - const entry = await CacheEntry.find(request, options) - if (!entry) { - // no cached result, if the cache mode is 'only-if-cached' that's a failure - if (options.cache === 'only-if-cached') { - throw new NotCachedError(request.url) - } - - // otherwise, we make a request, store it and return it - const response = await remote(request, options) - const newEntry = new CacheEntry({ request, response, options }) - return newEntry.store('miss') - } - - // we have a cached response that satisfies this request, however if the cache - // mode is 'no-cache' then we send the revalidation request no matter what - if (options.cache === 'no-cache') { - return entry.revalidate(request, options) - } - - // if the cached entry is not stale, or if the cache mode is 'force-cache' or - // 'only-if-cached' we can respond with the cached entry. set the status - // based on the result of needsRevalidation and respond - const _needsRevalidation = entry.policy.needsRevalidation(request) - if (options.cache === 'force-cache' || - options.cache === 'only-if-cached' || - !_needsRevalidation) { - return entry.respond(request.method, options, _needsRevalidation ? 
'stale' : 'hit') - } - - // if we got here, the cache entry is stale so revalidate it - return entry.revalidate(request, options) -} - -cacheFetch.invalidate = async (request, options) => { - if (!options.cachePath) { - return - } - - return CacheEntry.invalidate(request, options) -} - -module.exports = cacheFetch diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/key.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/key.js deleted file mode 100644 index f7684d562b7fa..0000000000000 --- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/key.js +++ /dev/null @@ -1,17 +0,0 @@ -const { URL, format } = require('url') - -// options passed to url.format() when generating a key -const formatOptions = { - auth: false, - fragment: false, - search: true, - unicode: false, -} - -// returns a string to be used as the cache key for the Request -const cacheKey = (request) => { - const parsed = new URL(request.url) - return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}` -} - -module.exports = cacheKey diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/policy.js deleted file mode 100644 index ada3c8600dae9..0000000000000 --- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/policy.js +++ /dev/null @@ -1,161 +0,0 @@ -const CacheSemantics = require('http-cache-semantics') -const Negotiator = require('negotiator') -const ssri = require('ssri') - -// options passed to http-cache-semantics constructor -const policyOptions = { - shared: false, - ignoreCargoCult: true, -} - -// a fake empty response, used when only testing the -// request for storability -const emptyResponse = { status: 200, headers: {} } - -// returns a plain object representation of the Request -const requestObject = (request) => { - const _obj = { - method: request.method, - url: request.url, - headers: {}, - compress: request.compress, - } - - request.headers.forEach((value, key) => { - _obj.headers[key] = value - }) - - return _obj -} - -// returns a plain object representation of the Response -const responseObject = (response) => { - const _obj = { - status: response.status, - headers: {}, - } - - response.headers.forEach((value, key) => { - _obj.headers[key] = value - }) - - return _obj -} - -class CachePolicy { - constructor ({ entry, request, response, options }) { - this.entry = entry - this.request = requestObject(request) - this.response = responseObject(response) - this.options = options - this.policy = new CacheSemantics(this.request, this.response, policyOptions) - - if (this.entry) { - // if we have an entry, copy the timestamp to the _responseTime - // this is necessary because the CacheSemantics constructor forces - // the value to Date.now() which means a policy created from a - // cache entry is likely to always identify itself as stale - this.policy._responseTime = this.entry.metadata.time - } - } - - // static method to quickly determine if a request alone is storable - static storable (request, options) { - // no cachePath means no caching - if (!options.cachePath) { - return false - } - - // user explicitly asked not to cache - if (options.cache === 'no-store') { - return false - } - - // we only cache GET and HEAD requests - if (!['GET', 'HEAD'].includes(request.method)) { - return false - } - - // otherwise, let http-cache-semantics make the decision - // based on the request's headers - const policy = new 
CacheSemantics(requestObject(request), emptyResponse, policyOptions) - return policy.storable() - } - - // returns true if the policy satisfies the request - satisfies (request) { - const _req = requestObject(request) - if (this.request.headers.host !== _req.headers.host) { - return false - } - - if (this.request.compress !== _req.compress) { - return false - } - - const negotiatorA = new Negotiator(this.request) - const negotiatorB = new Negotiator(_req) - - if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) { - return false - } - - if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) { - return false - } - - if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) { - return false - } - - if (this.options.integrity) { - return ssri.parse(this.options.integrity).match(this.entry.integrity) - } - - return true - } - - // returns true if the request and response allow caching - storable () { - return this.policy.storable() - } - - // NOTE: this is a hack to avoid parsing the cache-control - // header ourselves, it returns true if the response's - // cache-control contains must-revalidate - get mustRevalidate () { - return !!this.policy._rescc['must-revalidate'] - } - - // returns true if the cached response requires revalidation - // for the given request - needsRevalidation (request) { - const _req = requestObject(request) - // force method to GET because we only cache GETs - // but can serve a HEAD from a cached GET - _req.method = 'GET' - return !this.policy.satisfiesWithoutRevalidation(_req) - } - - responseHeaders () { - return this.policy.responseHeaders() - } - - // returns a new object containing the appropriate headers - // to send a revalidation request - revalidationHeaders (request) { - const _req = requestObject(request) - return this.policy.revalidationHeaders(_req) - } - - // returns true if the request/response was revalidated - // successfully. returns false if a new response was received - revalidated (request, response) { - const _req = requestObject(request) - const _res = responseObject(response) - const policy = this.policy.revalidatedPolicy(_req, _res) - return !policy.modified - } -} - -module.exports = CachePolicy diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/fetch.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/fetch.js deleted file mode 100644 index 233ba67e16550..0000000000000 --- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/fetch.js +++ /dev/null @@ -1,118 +0,0 @@ -'use strict' - -const { FetchError, Request, isRedirect } = require('minipass-fetch') -const url = require('url') - -const CachePolicy = require('./cache/policy.js') -const cache = require('./cache/index.js') -const remote = require('./remote.js') - -// given a Request, a Response and user options -// return true if the response is a redirect that -// can be followed. 
we throw errors that will result -// in the fetch being rejected if the redirect is -// possible but invalid for some reason -const canFollowRedirect = (request, response, options) => { - if (!isRedirect(response.status)) { - return false - } - - if (options.redirect === 'manual') { - return false - } - - if (options.redirect === 'error') { - throw new FetchError(`redirect mode is set to error: ${request.url}`, - 'no-redirect', { code: 'ENOREDIRECT' }) - } - - if (!response.headers.has('location')) { - throw new FetchError(`redirect location header missing for: ${request.url}`, - 'no-location', { code: 'EINVALIDREDIRECT' }) - } - - if (request.counter >= request.follow) { - throw new FetchError(`maximum redirect reached at: ${request.url}`, - 'max-redirect', { code: 'EMAXREDIRECT' }) - } - - return true -} - -// given a Request, a Response, and the user's options return an object -// with a new Request and a new options object that will be used for -// following the redirect -const getRedirect = (request, response, options) => { - const _opts = { ...options } - const location = response.headers.get('location') - const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url) - // Comment below is used under the following license: - /** - * @license - * Copyright (c) 2010-2012 Mikeal Rogers - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an "AS - * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language - * governing permissions and limitations under the License. - */ - - // Remove authorization if changing hostnames (but not if just - // changing ports or protocols). This matches the behavior of request: - // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138 - if (new url.URL(request.url).hostname !== redirectUrl.hostname) { - request.headers.delete('authorization') - request.headers.delete('cookie') - } - - // for POST request with 301/302 response, or any request with 303 response, - // use GET when following redirect - if ( - response.status === 303 || - (request.method === 'POST' && [301, 302].includes(response.status)) - ) { - _opts.method = 'GET' - _opts.body = null - request.headers.delete('content-length') - } - - _opts.headers = {} - request.headers.forEach((value, key) => { - _opts.headers[key] = value - }) - - _opts.counter = ++request.counter - const redirectReq = new Request(url.format(redirectUrl), _opts) - return { - request: redirectReq, - options: _opts, - } -} - -const fetch = async (request, options) => { - const response = CachePolicy.storable(request, options) - ? 
await cache(request, options) - : await remote(request, options) - - // if the request wasn't a GET or HEAD, and the response - // status is between 200 and 399 inclusive, invalidate the - // request url - if (!['GET', 'HEAD'].includes(request.method) && - response.status >= 200 && - response.status <= 399) { - await cache.invalidate(request, options) - } - - if (!canFollowRedirect(request, response, options)) { - return response - } - - const redirect = getRedirect(request, response, options) - return fetch(redirect.request, redirect.options) -} - -module.exports = fetch diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/index.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/index.js deleted file mode 100644 index 2f12e8e1b6113..0000000000000 --- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/index.js +++ /dev/null @@ -1,41 +0,0 @@ -const { FetchError, Headers, Request, Response } = require('minipass-fetch') - -const configureOptions = require('./options.js') -const fetch = require('./fetch.js') - -const makeFetchHappen = (url, opts) => { - const options = configureOptions(opts) - - const request = new Request(url, options) - return fetch(request, options) -} - -makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => { - if (typeof defaultUrl === 'object') { - defaultOptions = defaultUrl - defaultUrl = null - } - - const defaultedFetch = (url, options = {}) => { - const finalUrl = url || defaultUrl - const finalOptions = { - ...defaultOptions, - ...options, - headers: { - ...defaultOptions.headers, - ...options.headers, - }, - } - return wrappedFetch(finalUrl, finalOptions) - } - - defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) => - makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch) - return defaultedFetch -} - -module.exports = makeFetchHappen -module.exports.FetchError = FetchError -module.exports.Headers = Headers -module.exports.Request = Request -module.exports.Response = Response diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/options.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/options.js deleted file mode 100644 index f77511279f831..0000000000000 --- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/options.js +++ /dev/null @@ -1,54 +0,0 @@ -const dns = require('dns') - -const conditionalHeaders = [ - 'if-modified-since', - 'if-none-match', - 'if-unmodified-since', - 'if-match', - 'if-range', -] - -const configureOptions = (opts) => { - const { strictSSL, ...options } = { ...opts } - options.method = options.method ? 
options.method.toUpperCase() : 'GET' - options.rejectUnauthorized = strictSSL !== false - - if (!options.retry) { - options.retry = { retries: 0 } - } else if (typeof options.retry === 'string') { - const retries = parseInt(options.retry, 10) - if (isFinite(retries)) { - options.retry = { retries } - } else { - options.retry = { retries: 0 } - } - } else if (typeof options.retry === 'number') { - options.retry = { retries: options.retry } - } else { - options.retry = { retries: 0, ...options.retry } - } - - options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns } - - options.cache = options.cache || 'default' - if (options.cache === 'default') { - const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => { - return conditionalHeaders.includes(name.toLowerCase()) - }) - if (hasConditionalHeader) { - options.cache = 'no-store' - } - } - - options.cacheAdditionalHeaders = options.cacheAdditionalHeaders || [] - - // cacheManager is deprecated, but if it's set and - // cachePath is not we should copy it to the new field - if (options.cacheManager && !options.cachePath) { - options.cachePath = options.cacheManager - } - - return options -} - -module.exports = configureOptions diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/pipeline.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/pipeline.js deleted file mode 100644 index b1d221b2d0ce3..0000000000000 --- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/pipeline.js +++ /dev/null @@ -1,41 +0,0 @@ -'use strict' - -const MinipassPipeline = require('minipass-pipeline') - -class CachingMinipassPipeline extends MinipassPipeline { - #events = [] - #data = new Map() - - constructor (opts, ...streams) { - // CRITICAL: do NOT pass the streams to the call to super(), this will start - // the flow of data and potentially cause the events we need to catch to emit - // before we've finished our own setup. instead we call super() with no args, - // finish our setup, and then push the streams into ourselves to start the - // data flow - super() - this.#events = opts.events - - /* istanbul ignore next - coverage disabled because this is pointless to test here */ - if (streams.length) { - this.push(...streams) - } - } - - on (event, handler) { - if (this.#events.includes(event) && this.#data.has(event)) { - return handler(...this.#data.get(event)) - } - - return super.on(event, handler) - } - - emit (event, ...data) { - if (this.#events.includes(event)) { - this.#data.set(event, data) - } - - return super.emit(event, ...data) - } -} - -module.exports = CachingMinipassPipeline diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/remote.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/remote.js deleted file mode 100644 index 8554564074de6..0000000000000 --- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/remote.js +++ /dev/null @@ -1,131 +0,0 @@ -const { Minipass } = require('minipass') -const fetch = require('minipass-fetch') -const promiseRetry = require('promise-retry') -const ssri = require('ssri') -const { log } = require('proc-log') - -const CachingMinipassPipeline = require('./pipeline.js') -const { getAgent } = require('@npmcli/agent') -const pkg = require('../package.json') - -const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})` - -const RETRY_ERRORS = [ - 'ECONNRESET', // remote socket closed on us - 'ECONNREFUSED', // remote host refused to open connection - 'EADDRINUSE', // failed to bind to a local port (proxy?) 
- 'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW - // from @npmcli/agent - 'ECONNECTIONTIMEOUT', - 'EIDLETIMEOUT', - 'ERESPONSETIMEOUT', - 'ETRANSFERTIMEOUT', - // Known codes we do NOT retry on: - // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline) - // EINVALIDPROXY // invalid protocol from @npmcli/agent - // EINVALIDRESPONSE // invalid status code from @npmcli/agent -] - -const RETRY_TYPES = [ - 'request-timeout', -] - -// make a request directly to the remote source, -// retrying certain classes of errors as well as -// following redirects (through the cache if necessary) -// and verifying response integrity -const remoteFetch = (request, options) => { - const agent = getAgent(request.url, options) - if (!request.headers.has('connection')) { - request.headers.set('connection', agent ? 'keep-alive' : 'close') - } - - if (!request.headers.has('user-agent')) { - request.headers.set('user-agent', USER_AGENT) - } - - // keep our own options since we're overriding the agent - // and the redirect mode - const _opts = { - ...options, - agent, - redirect: 'manual', - } - - return promiseRetry(async (retryHandler, attemptNum) => { - const req = new fetch.Request(request, _opts) - try { - let res = await fetch(req, _opts) - if (_opts.integrity && res.status === 200) { - // we got a 200 response and the user has specified an expected - // integrity value, so wrap the response in an ssri stream to verify it - const integrityStream = ssri.integrityStream({ - algorithms: _opts.algorithms, - integrity: _opts.integrity, - size: _opts.size, - }) - const pipeline = new CachingMinipassPipeline({ - events: ['integrity', 'size'], - }, res.body, integrityStream) - // we also propagate the integrity and size events out to the pipeline so we can use - // this new response body as an integrityEmitter for cacache - integrityStream.on('integrity', i => pipeline.emit('integrity', i)) - integrityStream.on('size', s => pipeline.emit('size', s)) - res = new fetch.Response(pipeline, res) - // set an explicit flag so we know if our response body will emit integrity and size - res.body.hasIntegrityEmitter = true - } - - res.headers.set('x-fetch-attempts', attemptNum) - - // do not retry POST requests, or requests with a streaming body - // do retry requests with a 408, 420, 429 or 500+ status in the response - const isStream = Minipass.isStream(req.body) - const isRetriable = req.method !== 'POST' && - !isStream && - ([408, 420, 429].includes(res.status) || res.status >= 500) - - if (isRetriable) { - if (typeof options.onRetry === 'function') { - options.onRetry(res) - } - - /* eslint-disable-next-line max-len */ - log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${res.status}`) - return retryHandler(res) - } - - return res - } catch (err) { - const code = (err.code === 'EPROMISERETRY') - ? 
err.retried.code - : err.code - - // err.retried will be the thing that was thrown from above - // if it's a response, we just got a bad status code and we - // can re-throw to allow the retry - const isRetryError = err.retried instanceof fetch.Response || - (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type)) - - if (req.method === 'POST' || isRetryError) { - throw err - } - - if (typeof options.onRetry === 'function') { - options.onRetry(err) - } - - log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${err.code}`) - return retryHandler(err) - } - }, options.retry).catch((err) => { - // don't reject for http errors, just return them - if (err.status >= 400 && err.type !== 'system') { - return err - } - - throw err - }) -} - -module.exports = remoteFetch diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/package.json b/node_modules/tuf-js/node_modules/make-fetch-happen/package.json deleted file mode 100644 index 7adb4d1e7f971..0000000000000 --- a/node_modules/tuf-js/node_modules/make-fetch-happen/package.json +++ /dev/null @@ -1,75 +0,0 @@ -{ - "name": "make-fetch-happen", - "version": "13.0.1", - "description": "Opinionated, caching, retrying fetch client", - "main": "lib/index.js", - "files": [ - "bin/", - "lib/" - ], - "scripts": { - "test": "tap", - "posttest": "npm run lint", - "eslint": "eslint", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "lintfix": "npm run lint -- --fix", - "postlint": "template-oss-check", - "snap": "tap", - "template-oss-apply": "template-oss-apply --force" - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/make-fetch-happen.git" - }, - "keywords": [ - "http", - "request", - "fetch", - "mean girls", - "caching", - "cache", - "subresource integrity" - ], - "author": "GitHub Inc.", - "license": "ISC", - "dependencies": { - "@npmcli/agent": "^2.0.0", - "cacache": "^18.0.0", - "http-cache-semantics": "^4.1.1", - "is-lambda": "^1.0.1", - "minipass": "^7.0.2", - "minipass-fetch": "^3.0.0", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "negotiator": "^0.6.3", - "proc-log": "^4.2.0", - "promise-retry": "^2.0.1", - "ssri": "^10.0.0" - }, - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.21.4", - "nock": "^13.2.4", - "safe-buffer": "^5.2.1", - "standard-version": "^9.3.2", - "tap": "^16.0.0" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - }, - "tap": { - "color": 1, - "files": "test/*.js", - "check-coverage": true, - "timeout": 60, - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.21.4", - "publish": "true" - } -} diff --git a/node_modules/tuf-js/node_modules/minipass-fetch/LICENSE b/node_modules/tuf-js/node_modules/minipass-fetch/LICENSE deleted file mode 100644 index 3c3410cdc12ee..0000000000000 --- a/node_modules/tuf-js/node_modules/minipass-fetch/LICENSE +++ /dev/null @@ -1,28 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Isaac Z. 
Schlueter and Contributors -Copyright (c) 2016 David Frank - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - ---- - -Note: This is a derivative work based on "node-fetch" by David Frank, -modified and distributed under the terms of the MIT license above. -https://github.com/bitinn/node-fetch diff --git a/node_modules/tuf-js/node_modules/minipass-fetch/lib/abort-error.js b/node_modules/tuf-js/node_modules/minipass-fetch/lib/abort-error.js deleted file mode 100644 index b18f643269e37..0000000000000 --- a/node_modules/tuf-js/node_modules/minipass-fetch/lib/abort-error.js +++ /dev/null @@ -1,17 +0,0 @@ -'use strict' -class AbortError extends Error { - constructor (message) { - super(message) - this.code = 'FETCH_ABORTED' - this.type = 'aborted' - Error.captureStackTrace(this, this.constructor) - } - - get name () { - return 'AbortError' - } - - // don't allow name to be overridden, but don't throw either - set name (s) {} -} -module.exports = AbortError diff --git a/node_modules/tuf-js/node_modules/minipass-fetch/lib/blob.js b/node_modules/tuf-js/node_modules/minipass-fetch/lib/blob.js deleted file mode 100644 index 121b1730102e7..0000000000000 --- a/node_modules/tuf-js/node_modules/minipass-fetch/lib/blob.js +++ /dev/null @@ -1,97 +0,0 @@ -'use strict' -const { Minipass } = require('minipass') -const TYPE = Symbol('type') -const BUFFER = Symbol('buffer') - -class Blob { - constructor (blobParts, options) { - this[TYPE] = '' - - const buffers = [] - let size = 0 - - if (blobParts) { - const a = blobParts - const length = Number(a.length) - for (let i = 0; i < length; i++) { - const element = a[i] - const buffer = element instanceof Buffer ? element - : ArrayBuffer.isView(element) - ? Buffer.from(element.buffer, element.byteOffset, element.byteLength) - : element instanceof ArrayBuffer ? Buffer.from(element) - : element instanceof Blob ? element[BUFFER] - : typeof element === 'string' ? 
Buffer.from(element) - : Buffer.from(String(element)) - size += buffer.length - buffers.push(buffer) - } - } - - this[BUFFER] = Buffer.concat(buffers, size) - - const type = options && options.type !== undefined - && String(options.type).toLowerCase() - if (type && !/[^\u0020-\u007E]/.test(type)) { - this[TYPE] = type - } - } - - get size () { - return this[BUFFER].length - } - - get type () { - return this[TYPE] - } - - text () { - return Promise.resolve(this[BUFFER].toString()) - } - - arrayBuffer () { - const buf = this[BUFFER] - const off = buf.byteOffset - const len = buf.byteLength - const ab = buf.buffer.slice(off, off + len) - return Promise.resolve(ab) - } - - stream () { - return new Minipass().end(this[BUFFER]) - } - - slice (start, end, type) { - const size = this.size - const relativeStart = start === undefined ? 0 - : start < 0 ? Math.max(size + start, 0) - : Math.min(start, size) - const relativeEnd = end === undefined ? size - : end < 0 ? Math.max(size + end, 0) - : Math.min(end, size) - const span = Math.max(relativeEnd - relativeStart, 0) - - const buffer = this[BUFFER] - const slicedBuffer = buffer.slice( - relativeStart, - relativeStart + span - ) - const blob = new Blob([], { type }) - blob[BUFFER] = slicedBuffer - return blob - } - - get [Symbol.toStringTag] () { - return 'Blob' - } - - static get BUFFER () { - return BUFFER - } -} - -Object.defineProperties(Blob.prototype, { - size: { enumerable: true }, - type: { enumerable: true }, -}) - -module.exports = Blob diff --git a/node_modules/tuf-js/node_modules/minipass-fetch/lib/body.js b/node_modules/tuf-js/node_modules/minipass-fetch/lib/body.js deleted file mode 100644 index 62286bd1de0d9..0000000000000 --- a/node_modules/tuf-js/node_modules/minipass-fetch/lib/body.js +++ /dev/null @@ -1,350 +0,0 @@ -'use strict' -const { Minipass } = require('minipass') -const MinipassSized = require('minipass-sized') - -const Blob = require('./blob.js') -const { BUFFER } = Blob -const FetchError = require('./fetch-error.js') - -// optional dependency on 'encoding' -let convert -try { - convert = require('encoding').convert -} catch (e) { - // defer error until textConverted is called -} - -const INTERNALS = Symbol('Body internals') -const CONSUME_BODY = Symbol('consumeBody') - -class Body { - constructor (bodyArg, options = {}) { - const { size = 0, timeout = 0 } = options - const body = bodyArg === undefined || bodyArg === null ? null - : isURLSearchParams(bodyArg) ? Buffer.from(bodyArg.toString()) - : isBlob(bodyArg) ? bodyArg - : Buffer.isBuffer(bodyArg) ? bodyArg - : Object.prototype.toString.call(bodyArg) === '[object ArrayBuffer]' - ? Buffer.from(bodyArg) - : ArrayBuffer.isView(bodyArg) - ? Buffer.from(bodyArg.buffer, bodyArg.byteOffset, bodyArg.byteLength) - : Minipass.isStream(bodyArg) ? bodyArg - : Buffer.from(String(bodyArg)) - - this[INTERNALS] = { - body, - disturbed: false, - error: null, - } - - this.size = size - this.timeout = timeout - - if (Minipass.isStream(body)) { - body.on('error', er => { - const error = er.name === 'AbortError' ? 
er - : new FetchError(`Invalid response while trying to fetch ${ - this.url}: ${er.message}`, 'system', er) - this[INTERNALS].error = error - }) - } - } - - get body () { - return this[INTERNALS].body - } - - get bodyUsed () { - return this[INTERNALS].disturbed - } - - arrayBuffer () { - return this[CONSUME_BODY]().then(buf => - buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength)) - } - - blob () { - const ct = this.headers && this.headers.get('content-type') || '' - return this[CONSUME_BODY]().then(buf => Object.assign( - new Blob([], { type: ct.toLowerCase() }), - { [BUFFER]: buf } - )) - } - - async json () { - const buf = await this[CONSUME_BODY]() - try { - return JSON.parse(buf.toString()) - } catch (er) { - throw new FetchError( - `invalid json response body at ${this.url} reason: ${er.message}`, - 'invalid-json' - ) - } - } - - text () { - return this[CONSUME_BODY]().then(buf => buf.toString()) - } - - buffer () { - return this[CONSUME_BODY]() - } - - textConverted () { - return this[CONSUME_BODY]().then(buf => convertBody(buf, this.headers)) - } - - [CONSUME_BODY] () { - if (this[INTERNALS].disturbed) { - return Promise.reject(new TypeError(`body used already for: ${ - this.url}`)) - } - - this[INTERNALS].disturbed = true - - if (this[INTERNALS].error) { - return Promise.reject(this[INTERNALS].error) - } - - // body is null - if (this.body === null) { - return Promise.resolve(Buffer.alloc(0)) - } - - if (Buffer.isBuffer(this.body)) { - return Promise.resolve(this.body) - } - - const upstream = isBlob(this.body) ? this.body.stream() : this.body - - /* istanbul ignore if: should never happen */ - if (!Minipass.isStream(upstream)) { - return Promise.resolve(Buffer.alloc(0)) - } - - const stream = this.size && upstream instanceof MinipassSized ? upstream - : !this.size && upstream instanceof Minipass && - !(upstream instanceof MinipassSized) ? upstream - : this.size ? new MinipassSized({ size: this.size }) - : new Minipass() - - // allow timeout on slow response body, but only if the stream is still writable. this - // makes the timeout center on the socket stream from lib/index.js rather than the - // intermediary minipass stream we create to receive the data - const resTimeout = this.timeout && stream.writable ? setTimeout(() => { - stream.emit('error', new FetchError( - `Response timeout while trying to fetch ${ - this.url} (over ${this.timeout}ms)`, 'body-timeout')) - }, this.timeout) : null - - // do not keep the process open just for this timeout, even - // though we expect it'll get cleared eventually. - if (resTimeout && resTimeout.unref) { - resTimeout.unref() - } - - // do the pipe in the promise, because the pipe() can send too much - // data through right away and upset the MP Sized object - return new Promise((resolve) => { - // if the stream is some other kind of stream, then pipe through a MP - // so we can collect it more easily. 
- if (stream !== upstream) { - upstream.on('error', er => stream.emit('error', er)) - upstream.pipe(stream) - } - resolve() - }).then(() => stream.concat()).then(buf => { - clearTimeout(resTimeout) - return buf - }).catch(er => { - clearTimeout(resTimeout) - // request was aborted, reject with this Error - if (er.name === 'AbortError' || er.name === 'FetchError') { - throw er - } else if (er.name === 'RangeError') { - throw new FetchError(`Could not create Buffer from response body for ${ - this.url}: ${er.message}`, 'system', er) - } else { - // other errors, such as incorrect content-encoding or content-length - throw new FetchError(`Invalid response body while trying to fetch ${ - this.url}: ${er.message}`, 'system', er) - } - }) - } - - static clone (instance) { - if (instance.bodyUsed) { - throw new Error('cannot clone body after it is used') - } - - const body = instance.body - - // check that body is a stream and not form-data object - // NB: can't clone the form-data object without having it as a dependency - if (Minipass.isStream(body) && typeof body.getBoundary !== 'function') { - // create a dedicated tee stream so that we don't lose data - // potentially sitting in the body stream's buffer by writing it - // immediately to p1 and not having it for p2. - const tee = new Minipass() - const p1 = new Minipass() - const p2 = new Minipass() - tee.on('error', er => { - p1.emit('error', er) - p2.emit('error', er) - }) - body.on('error', er => tee.emit('error', er)) - tee.pipe(p1) - tee.pipe(p2) - body.pipe(tee) - // set instance body to one fork, return the other - instance[INTERNALS].body = p1 - return p2 - } else { - return instance.body - } - } - - static extractContentType (body) { - return body === null || body === undefined ? null - : typeof body === 'string' ? 'text/plain;charset=UTF-8' - : isURLSearchParams(body) - ? 'application/x-www-form-urlencoded;charset=UTF-8' - : isBlob(body) ? body.type || null - : Buffer.isBuffer(body) ? null - : Object.prototype.toString.call(body) === '[object ArrayBuffer]' ? null - : ArrayBuffer.isView(body) ? null - : typeof body.getBoundary === 'function' - ? `multipart/form-data;boundary=${body.getBoundary()}` - : Minipass.isStream(body) ? null - : 'text/plain;charset=UTF-8' - } - - static getTotalBytes (instance) { - const { body } = instance - return (body === null || body === undefined) ? 0 - : isBlob(body) ? body.size - : Buffer.isBuffer(body) ? body.length - : body && typeof body.getLengthSync === 'function' && ( - // detect form data input from form-data module - body._lengthRetrievers && - /* istanbul ignore next */ body._lengthRetrievers.length === 0 || // 1.x - body.hasKnownLength && body.hasKnownLength()) // 2.x - ? body.getLengthSync() - : null - } - - static writeToStream (dest, instance) { - const { body } = instance - - if (body === null || body === undefined) { - dest.end() - } else if (Buffer.isBuffer(body) || typeof body === 'string') { - dest.end(body) - } else { - // body is stream or blob - const stream = isBlob(body) ? body.stream() : body - stream.on('error', er => dest.emit('error', er)).pipe(dest) - } - - return dest - } -} - -Object.defineProperties(Body.prototype, { - body: { enumerable: true }, - bodyUsed: { enumerable: true }, - arrayBuffer: { enumerable: true }, - blob: { enumerable: true }, - json: { enumerable: true }, - text: { enumerable: true }, -}) - -const isURLSearchParams = obj => - // Duck-typing as a necessary condition. 
- (typeof obj !== 'object' || - typeof obj.append !== 'function' || - typeof obj.delete !== 'function' || - typeof obj.get !== 'function' || - typeof obj.getAll !== 'function' || - typeof obj.has !== 'function' || - typeof obj.set !== 'function') ? false - // Brand-checking and more duck-typing as optional condition. - : obj.constructor.name === 'URLSearchParams' || - Object.prototype.toString.call(obj) === '[object URLSearchParams]' || - typeof obj.sort === 'function' - -const isBlob = obj => - typeof obj === 'object' && - typeof obj.arrayBuffer === 'function' && - typeof obj.type === 'string' && - typeof obj.stream === 'function' && - typeof obj.constructor === 'function' && - typeof obj.constructor.name === 'string' && - /^(Blob|File)$/.test(obj.constructor.name) && - /^(Blob|File)$/.test(obj[Symbol.toStringTag]) - -const convertBody = (buffer, headers) => { - /* istanbul ignore if */ - if (typeof convert !== 'function') { - throw new Error('The package `encoding` must be installed to use the textConverted() function') - } - - const ct = headers && headers.get('content-type') - let charset = 'utf-8' - let res - - // header - if (ct) { - res = /charset=([^;]*)/i.exec(ct) - } - - // no charset in content type, peek at response body for at most 1024 bytes - const str = buffer.slice(0, 1024).toString() - - // html5 - if (!res && str) { - res = / this.expect - ? 'max-size' : type - this.message = message - Error.captureStackTrace(this, this.constructor) - } - - get name () { - return 'FetchError' - } - - // don't allow name to be overwritten - set name (n) {} - - get [Symbol.toStringTag] () { - return 'FetchError' - } -} -module.exports = FetchError diff --git a/node_modules/tuf-js/node_modules/minipass-fetch/lib/headers.js b/node_modules/tuf-js/node_modules/minipass-fetch/lib/headers.js deleted file mode 100644 index dd6e854d5ba39..0000000000000 --- a/node_modules/tuf-js/node_modules/minipass-fetch/lib/headers.js +++ /dev/null @@ -1,267 +0,0 @@ -'use strict' -const invalidTokenRegex = /[^^_`a-zA-Z\-0-9!#$%&'*+.|~]/ -const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/ - -const validateName = name => { - name = `${name}` - if (invalidTokenRegex.test(name) || name === '') { - throw new TypeError(`${name} is not a legal HTTP header name`) - } -} - -const validateValue = value => { - value = `${value}` - if (invalidHeaderCharRegex.test(value)) { - throw new TypeError(`${value} is not a legal HTTP header value`) - } -} - -const find = (map, name) => { - name = name.toLowerCase() - for (const key in map) { - if (key.toLowerCase() === name) { - return key - } - } - return undefined -} - -const MAP = Symbol('map') -class Headers { - constructor (init = undefined) { - this[MAP] = Object.create(null) - if (init instanceof Headers) { - const rawHeaders = init.raw() - const headerNames = Object.keys(rawHeaders) - for (const headerName of headerNames) { - for (const value of rawHeaders[headerName]) { - this.append(headerName, value) - } - } - return - } - - // no-op - if (init === undefined || init === null) { - return - } - - if (typeof init === 'object') { - const method = init[Symbol.iterator] - if (method !== null && method !== undefined) { - if (typeof method !== 'function') { - throw new TypeError('Header pairs must be iterable') - } - - // sequence> - // Note: per spec we have to first exhaust the lists then process them - const pairs = [] - for (const pair of init) { - if (typeof pair !== 'object' || - typeof pair[Symbol.iterator] !== 'function') { - throw new TypeError('Each header pair 
must be iterable') - } - const arrPair = Array.from(pair) - if (arrPair.length !== 2) { - throw new TypeError('Each header pair must be a name/value tuple') - } - pairs.push(arrPair) - } - - for (const pair of pairs) { - this.append(pair[0], pair[1]) - } - } else { - // record - for (const key of Object.keys(init)) { - this.append(key, init[key]) - } - } - } else { - throw new TypeError('Provided initializer must be an object') - } - } - - get (name) { - name = `${name}` - validateName(name) - const key = find(this[MAP], name) - if (key === undefined) { - return null - } - - return this[MAP][key].join(', ') - } - - forEach (callback, thisArg = undefined) { - let pairs = getHeaders(this) - for (let i = 0; i < pairs.length; i++) { - const [name, value] = pairs[i] - callback.call(thisArg, value, name, this) - // refresh in case the callback added more headers - pairs = getHeaders(this) - } - } - - set (name, value) { - name = `${name}` - value = `${value}` - validateName(name) - validateValue(value) - const key = find(this[MAP], name) - this[MAP][key !== undefined ? key : name] = [value] - } - - append (name, value) { - name = `${name}` - value = `${value}` - validateName(name) - validateValue(value) - const key = find(this[MAP], name) - if (key !== undefined) { - this[MAP][key].push(value) - } else { - this[MAP][name] = [value] - } - } - - has (name) { - name = `${name}` - validateName(name) - return find(this[MAP], name) !== undefined - } - - delete (name) { - name = `${name}` - validateName(name) - const key = find(this[MAP], name) - if (key !== undefined) { - delete this[MAP][key] - } - } - - raw () { - return this[MAP] - } - - keys () { - return new HeadersIterator(this, 'key') - } - - values () { - return new HeadersIterator(this, 'value') - } - - [Symbol.iterator] () { - return new HeadersIterator(this, 'key+value') - } - - entries () { - return new HeadersIterator(this, 'key+value') - } - - get [Symbol.toStringTag] () { - return 'Headers' - } - - static exportNodeCompatibleHeaders (headers) { - const obj = Object.assign(Object.create(null), headers[MAP]) - - // http.request() only supports string as Host header. This hack makes - // specifying custom Host header possible. - const hostHeaderKey = find(headers[MAP], 'Host') - if (hostHeaderKey !== undefined) { - obj[hostHeaderKey] = obj[hostHeaderKey][0] - } - - return obj - } - - static createHeadersLenient (obj) { - const headers = new Headers() - for (const name of Object.keys(obj)) { - if (invalidTokenRegex.test(name)) { - continue - } - - if (Array.isArray(obj[name])) { - for (const val of obj[name]) { - if (invalidHeaderCharRegex.test(val)) { - continue - } - - if (headers[MAP][name] === undefined) { - headers[MAP][name] = [val] - } else { - headers[MAP][name].push(val) - } - } - } else if (!invalidHeaderCharRegex.test(obj[name])) { - headers[MAP][name] = [obj[name]] - } - } - return headers - } -} - -Object.defineProperties(Headers.prototype, { - get: { enumerable: true }, - forEach: { enumerable: true }, - set: { enumerable: true }, - append: { enumerable: true }, - has: { enumerable: true }, - delete: { enumerable: true }, - keys: { enumerable: true }, - values: { enumerable: true }, - entries: { enumerable: true }, -}) - -const getHeaders = (headers, kind = 'key+value') => - Object.keys(headers[MAP]).sort().map( - kind === 'key' ? k => k.toLowerCase() - : kind === 'value' ? 
k => headers[MAP][k].join(', ') - : k => [k.toLowerCase(), headers[MAP][k].join(', ')] - ) - -const INTERNAL = Symbol('internal') - -class HeadersIterator { - constructor (target, kind) { - this[INTERNAL] = { - target, - kind, - index: 0, - } - } - - get [Symbol.toStringTag] () { - return 'HeadersIterator' - } - - next () { - /* istanbul ignore if: should be impossible */ - if (!this || Object.getPrototypeOf(this) !== HeadersIterator.prototype) { - throw new TypeError('Value of `this` is not a HeadersIterator') - } - - const { target, kind, index } = this[INTERNAL] - const values = getHeaders(target, kind) - const len = values.length - if (index >= len) { - return { - value: undefined, - done: true, - } - } - - this[INTERNAL].index++ - - return { value: values[index], done: false } - } -} - -// manually extend because 'extends' requires a ctor -Object.setPrototypeOf(HeadersIterator.prototype, - Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))) - -module.exports = Headers diff --git a/node_modules/tuf-js/node_modules/minipass-fetch/lib/index.js b/node_modules/tuf-js/node_modules/minipass-fetch/lib/index.js deleted file mode 100644 index da402161670e6..0000000000000 --- a/node_modules/tuf-js/node_modules/minipass-fetch/lib/index.js +++ /dev/null @@ -1,377 +0,0 @@ -'use strict' -const { URL } = require('url') -const http = require('http') -const https = require('https') -const zlib = require('minizlib') -const { Minipass } = require('minipass') - -const Body = require('./body.js') -const { writeToStream, getTotalBytes } = Body -const Response = require('./response.js') -const Headers = require('./headers.js') -const { createHeadersLenient } = Headers -const Request = require('./request.js') -const { getNodeRequestOptions } = Request -const FetchError = require('./fetch-error.js') -const AbortError = require('./abort-error.js') - -// XXX this should really be split up and unit-ized for easier testing -// and better DRY implementation of data/http request aborting -const fetch = async (url, opts) => { - if (/^data:/.test(url)) { - const request = new Request(url, opts) - // delay 1 promise tick so that the consumer can abort right away - return Promise.resolve().then(() => new Promise((resolve, reject) => { - let type, data - try { - const { pathname, search } = new URL(url) - const split = pathname.split(',') - if (split.length < 2) { - throw new Error('invalid data: URI') - } - const mime = split.shift() - const base64 = /;base64$/.test(mime) - type = base64 ? mime.slice(0, -1 * ';base64'.length) : mime - const rawData = decodeURIComponent(split.join(',') + search) - data = base64 ? Buffer.from(rawData, 'base64') : Buffer.from(rawData) - } catch (er) { - return reject(new FetchError(`[${request.method}] ${ - request.url} invalid URL, ${er.message}`, 'system', er)) - } - - const { signal } = request - if (signal && signal.aborted) { - return reject(new AbortError('The user aborted a request.')) - } - - const headers = { 'Content-Length': data.length } - if (type) { - headers['Content-Type'] = type - } - return resolve(new Response(data, { headers })) - })) - } - - return new Promise((resolve, reject) => { - // build request object - const request = new Request(url, opts) - let options - try { - options = getNodeRequestOptions(request) - } catch (er) { - return reject(er) - } - - const send = (options.protocol === 'https:' ? 
https : http).request - const { signal } = request - let response = null - const abort = () => { - const error = new AbortError('The user aborted a request.') - reject(error) - if (Minipass.isStream(request.body) && - typeof request.body.destroy === 'function') { - request.body.destroy(error) - } - if (response && response.body) { - response.body.emit('error', error) - } - } - - if (signal && signal.aborted) { - return abort() - } - - const abortAndFinalize = () => { - abort() - finalize() - } - - const finalize = () => { - req.abort() - if (signal) { - signal.removeEventListener('abort', abortAndFinalize) - } - clearTimeout(reqTimeout) - } - - // send request - const req = send(options) - - if (signal) { - signal.addEventListener('abort', abortAndFinalize) - } - - let reqTimeout = null - if (request.timeout) { - req.once('socket', () => { - reqTimeout = setTimeout(() => { - reject(new FetchError(`network timeout at: ${ - request.url}`, 'request-timeout')) - finalize() - }, request.timeout) - }) - } - - req.on('error', er => { - // if a 'response' event is emitted before the 'error' event, then by the - // time this handler is run it's too late to reject the Promise for the - // response. instead, we forward the error event to the response stream - // so that the error will surface to the user when they try to consume - // the body. this is done as a side effect of aborting the request except - // for in windows, where we must forward the event manually, otherwise - // there is no longer a ref'd socket attached to the request and the - // stream never ends so the event loop runs out of work and the process - // exits without warning. - // coverage skipped here due to the difficulty in testing - // istanbul ignore next - if (req.res) { - req.res.emit('error', er) - } - reject(new FetchError(`request to ${request.url} failed, reason: ${ - er.message}`, 'system', er)) - finalize() - }) - - req.on('response', res => { - clearTimeout(reqTimeout) - - const headers = createHeadersLenient(res.headers) - - // HTTP fetch step 5 - if (fetch.isRedirect(res.statusCode)) { - // HTTP fetch step 5.2 - const location = headers.get('Location') - - // HTTP fetch step 5.3 - let locationURL = null - try { - locationURL = location === null ? null : new URL(location, request.url).toString() - } catch { - // error here can only be invalid URL in Location: header - // do not throw when options.redirect == manual - // let the user extract the errorneous redirect URL - if (request.redirect !== 'manual') { - /* eslint-disable-next-line max-len */ - reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')) - finalize() - return - } - } - - // HTTP fetch step 5.5 - if (request.redirect === 'error') { - reject(new FetchError('uri requested responds with a redirect, ' + - `redirect mode is set to error: ${request.url}`, 'no-redirect')) - finalize() - return - } else if (request.redirect === 'manual') { - // node-fetch-specific step: make manual redirect a bit easier to - // use by setting the Location header value to the resolved URL. 
- if (locationURL !== null) { - // handle corrupted header - try { - headers.set('Location', locationURL) - } catch (err) { - /* istanbul ignore next: nodejs server prevent invalid - response headers, we can't test this through normal - request */ - reject(err) - } - } - } else if (request.redirect === 'follow' && locationURL !== null) { - // HTTP-redirect fetch step 5 - if (request.counter >= request.follow) { - reject(new FetchError(`maximum redirect reached at: ${ - request.url}`, 'max-redirect')) - finalize() - return - } - - // HTTP-redirect fetch step 9 - if (res.statusCode !== 303 && - request.body && - getTotalBytes(request) === null) { - reject(new FetchError( - 'Cannot follow redirect with body being a readable stream', - 'unsupported-redirect' - )) - finalize() - return - } - - // Update host due to redirection - request.headers.set('host', (new URL(locationURL)).host) - - // HTTP-redirect fetch step 6 (counter increment) - // Create a new Request object. - const requestOpts = { - headers: new Headers(request.headers), - follow: request.follow, - counter: request.counter + 1, - agent: request.agent, - compress: request.compress, - method: request.method, - body: request.body, - signal: request.signal, - timeout: request.timeout, - } - - // if the redirect is to a new hostname, strip the authorization and cookie headers - const parsedOriginal = new URL(request.url) - const parsedRedirect = new URL(locationURL) - if (parsedOriginal.hostname !== parsedRedirect.hostname) { - requestOpts.headers.delete('authorization') - requestOpts.headers.delete('cookie') - } - - // HTTP-redirect fetch step 11 - if (res.statusCode === 303 || ( - (res.statusCode === 301 || res.statusCode === 302) && - request.method === 'POST' - )) { - requestOpts.method = 'GET' - requestOpts.body = undefined - requestOpts.headers.delete('content-length') - } - - // HTTP-redirect fetch step 15 - resolve(fetch(new Request(locationURL, requestOpts))) - finalize() - return - } - } // end if(isRedirect) - - // prepare response - res.once('end', () => - signal && signal.removeEventListener('abort', abortAndFinalize)) - - const body = new Minipass() - // if an error occurs, either on the response stream itself, on one of the - // decoder streams, or a response length timeout from the Body class, we - // forward the error through to our internal body stream. If we see an - // error event on that, we call finalize to abort the request and ensure - // we don't leave a socket believing a request is in flight. - // this is difficult to test, so lacks specific coverage. - body.on('error', finalize) - // exceedingly rare that the stream would have an error, - // but just in case we proxy it to the stream in use. - res.on('error', /* istanbul ignore next */ er => body.emit('error', er)) - res.on('data', (chunk) => body.write(chunk)) - res.on('end', () => body.end()) - - const responseOptions = { - url: request.url, - status: res.statusCode, - statusText: res.statusMessage, - headers: headers, - size: request.size, - timeout: request.timeout, - counter: request.counter, - trailer: new Promise(resolveTrailer => - res.on('end', () => resolveTrailer(createHeadersLenient(res.trailers)))), - } - - // HTTP-network fetch step 12.1.1.3 - const codings = headers.get('Content-Encoding') - - // HTTP-network fetch step 12.1.1.4: handle content codings - - // in following scenarios we ignore compression support - // 1. compression support is disabled - // 2. HEAD request - // 3. no Content-Encoding header - // 4. 
no content response (204) - // 5. content not modified response (304) - if (!request.compress || - request.method === 'HEAD' || - codings === null || - res.statusCode === 204 || - res.statusCode === 304) { - response = new Response(body, responseOptions) - resolve(response) - return - } - - // Be less strict when decoding compressed responses, since sometimes - // servers send slightly invalid responses that are still accepted - // by common browsers. - // Always using Z_SYNC_FLUSH is what cURL does. - const zlibOptions = { - flush: zlib.constants.Z_SYNC_FLUSH, - finishFlush: zlib.constants.Z_SYNC_FLUSH, - } - - // for gzip - if (codings === 'gzip' || codings === 'x-gzip') { - const unzip = new zlib.Gunzip(zlibOptions) - response = new Response( - // exceedingly rare that the stream would have an error, - // but just in case we proxy it to the stream in use. - body.on('error', /* istanbul ignore next */ er => unzip.emit('error', er)).pipe(unzip), - responseOptions - ) - resolve(response) - return - } - - // for deflate - if (codings === 'deflate' || codings === 'x-deflate') { - // handle the infamous raw deflate response from old servers - // a hack for old IIS and Apache servers - const raw = res.pipe(new Minipass()) - raw.once('data', chunk => { - // see http://stackoverflow.com/questions/37519828 - const decoder = (chunk[0] & 0x0F) === 0x08 - ? new zlib.Inflate() - : new zlib.InflateRaw() - // exceedingly rare that the stream would have an error, - // but just in case we proxy it to the stream in use. - body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder) - response = new Response(decoder, responseOptions) - resolve(response) - }) - return - } - - // for br - if (codings === 'br') { - // ignoring coverage so tests don't have to fake support (or lack of) for brotli - // istanbul ignore next - try { - var decoder = new zlib.BrotliDecompress() - } catch (err) { - reject(err) - finalize() - return - } - // exceedingly rare that the stream would have an error, - // but just in case we proxy it to the stream in use. 
- body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder) - response = new Response(decoder, responseOptions) - resolve(response) - return - } - - // otherwise, use response as-is - response = new Response(body, responseOptions) - resolve(response) - }) - - writeToStream(req, request) - }) -} - -module.exports = fetch - -fetch.isRedirect = code => - code === 301 || - code === 302 || - code === 303 || - code === 307 || - code === 308 - -fetch.Headers = Headers -fetch.Request = Request -fetch.Response = Response -fetch.FetchError = FetchError -fetch.AbortError = AbortError diff --git a/node_modules/tuf-js/node_modules/minipass-fetch/lib/request.js b/node_modules/tuf-js/node_modules/minipass-fetch/lib/request.js deleted file mode 100644 index 054439e669910..0000000000000 --- a/node_modules/tuf-js/node_modules/minipass-fetch/lib/request.js +++ /dev/null @@ -1,282 +0,0 @@ -'use strict' -const { URL } = require('url') -const { Minipass } = require('minipass') -const Headers = require('./headers.js') -const { exportNodeCompatibleHeaders } = Headers -const Body = require('./body.js') -const { clone, extractContentType, getTotalBytes } = Body - -const version = require('../package.json').version -const defaultUserAgent = - `minipass-fetch/${version} (+https://github.com/isaacs/minipass-fetch)` - -const INTERNALS = Symbol('Request internals') - -const isRequest = input => - typeof input === 'object' && typeof input[INTERNALS] === 'object' - -const isAbortSignal = signal => { - const proto = ( - signal - && typeof signal === 'object' - && Object.getPrototypeOf(signal) - ) - return !!(proto && proto.constructor.name === 'AbortSignal') -} - -class Request extends Body { - constructor (input, init = {}) { - const parsedURL = isRequest(input) ? new URL(input.url) - : input && input.href ? new URL(input.href) - : new URL(`${input}`) - - if (isRequest(input)) { - init = { ...input[INTERNALS], ...init } - } else if (!input || typeof input === 'string') { - input = {} - } - - const method = (init.method || input.method || 'GET').toUpperCase() - const isGETHEAD = method === 'GET' || method === 'HEAD' - - if ((init.body !== null && init.body !== undefined || - isRequest(input) && input.body !== null) && isGETHEAD) { - throw new TypeError('Request with GET/HEAD method cannot have body') - } - - const inputBody = init.body !== null && init.body !== undefined ? init.body - : isRequest(input) && input.body !== null ? clone(input) - : null - - super(inputBody, { - timeout: init.timeout || input.timeout || 0, - size: init.size || input.size || 0, - }) - - const headers = new Headers(init.headers || input.headers || {}) - - if (inputBody !== null && inputBody !== undefined && - !headers.has('Content-Type')) { - const contentType = extractContentType(inputBody) - if (contentType) { - headers.append('Content-Type', contentType) - } - } - - const signal = 'signal' in init ? 
init.signal - : null - - if (signal !== null && signal !== undefined && !isAbortSignal(signal)) { - throw new TypeError('Expected signal must be an instanceof AbortSignal') - } - - // TLS specific options that are handled by node - const { - ca, - cert, - ciphers, - clientCertEngine, - crl, - dhparam, - ecdhCurve, - family, - honorCipherOrder, - key, - passphrase, - pfx, - rejectUnauthorized = process.env.NODE_TLS_REJECT_UNAUTHORIZED !== '0', - secureOptions, - secureProtocol, - servername, - sessionIdContext, - } = init - - this[INTERNALS] = { - method, - redirect: init.redirect || input.redirect || 'follow', - headers, - parsedURL, - signal, - ca, - cert, - ciphers, - clientCertEngine, - crl, - dhparam, - ecdhCurve, - family, - honorCipherOrder, - key, - passphrase, - pfx, - rejectUnauthorized, - secureOptions, - secureProtocol, - servername, - sessionIdContext, - } - - // node-fetch-only options - this.follow = init.follow !== undefined ? init.follow - : input.follow !== undefined ? input.follow - : 20 - this.compress = init.compress !== undefined ? init.compress - : input.compress !== undefined ? input.compress - : true - this.counter = init.counter || input.counter || 0 - this.agent = init.agent || input.agent - } - - get method () { - return this[INTERNALS].method - } - - get url () { - return this[INTERNALS].parsedURL.toString() - } - - get headers () { - return this[INTERNALS].headers - } - - get redirect () { - return this[INTERNALS].redirect - } - - get signal () { - return this[INTERNALS].signal - } - - clone () { - return new Request(this) - } - - get [Symbol.toStringTag] () { - return 'Request' - } - - static getNodeRequestOptions (request) { - const parsedURL = request[INTERNALS].parsedURL - const headers = new Headers(request[INTERNALS].headers) - - // fetch step 1.3 - if (!headers.has('Accept')) { - headers.set('Accept', '*/*') - } - - // Basic fetch - if (!/^https?:$/.test(parsedURL.protocol)) { - throw new TypeError('Only HTTP(S) protocols are supported') - } - - if (request.signal && - Minipass.isStream(request.body) && - typeof request.body.destroy !== 'function') { - throw new Error( - 'Cancellation of streamed requests with AbortSignal is not supported') - } - - // HTTP-network-or-cache fetch steps 2.4-2.7 - const contentLengthValue = - (request.body === null || request.body === undefined) && - /^(POST|PUT)$/i.test(request.method) ? '0' - : request.body !== null && request.body !== undefined - ? getTotalBytes(request) - : null - - if (contentLengthValue) { - headers.set('Content-Length', contentLengthValue + '') - } - - // HTTP-network-or-cache fetch step 2.11 - if (!headers.has('User-Agent')) { - headers.set('User-Agent', defaultUserAgent) - } - - // HTTP-network-or-cache fetch step 2.15 - if (request.compress && !headers.has('Accept-Encoding')) { - headers.set('Accept-Encoding', 'gzip,deflate') - } - - const agent = typeof request.agent === 'function' - ? 
request.agent(parsedURL) - : request.agent - - if (!headers.has('Connection') && !agent) { - headers.set('Connection', 'close') - } - - // TLS specific options that are handled by node - const { - ca, - cert, - ciphers, - clientCertEngine, - crl, - dhparam, - ecdhCurve, - family, - honorCipherOrder, - key, - passphrase, - pfx, - rejectUnauthorized, - secureOptions, - secureProtocol, - servername, - sessionIdContext, - } = request[INTERNALS] - - // HTTP-network fetch step 4.2 - // chunked encoding is handled by Node.js - - // we cannot spread parsedURL directly, so we have to read each property one-by-one - // and map them to the equivalent https?.request() method options - const urlProps = { - auth: parsedURL.username || parsedURL.password - ? `${parsedURL.username}:${parsedURL.password}` - : '', - host: parsedURL.host, - hostname: parsedURL.hostname, - path: `${parsedURL.pathname}${parsedURL.search}`, - port: parsedURL.port, - protocol: parsedURL.protocol, - } - - return { - ...urlProps, - method: request.method, - headers: exportNodeCompatibleHeaders(headers), - agent, - ca, - cert, - ciphers, - clientCertEngine, - crl, - dhparam, - ecdhCurve, - family, - honorCipherOrder, - key, - passphrase, - pfx, - rejectUnauthorized, - secureOptions, - secureProtocol, - servername, - sessionIdContext, - timeout: request.timeout, - } - } -} - -module.exports = Request - -Object.defineProperties(Request.prototype, { - method: { enumerable: true }, - url: { enumerable: true }, - headers: { enumerable: true }, - redirect: { enumerable: true }, - clone: { enumerable: true }, - signal: { enumerable: true }, -}) diff --git a/node_modules/tuf-js/node_modules/minipass-fetch/lib/response.js b/node_modules/tuf-js/node_modules/minipass-fetch/lib/response.js deleted file mode 100644 index 54cb52db3594a..0000000000000 --- a/node_modules/tuf-js/node_modules/minipass-fetch/lib/response.js +++ /dev/null @@ -1,90 +0,0 @@ -'use strict' -const http = require('http') -const { STATUS_CODES } = http - -const Headers = require('./headers.js') -const Body = require('./body.js') -const { clone, extractContentType } = Body - -const INTERNALS = Symbol('Response internals') - -class Response extends Body { - constructor (body = null, opts = {}) { - super(body, opts) - - const status = opts.status || 200 - const headers = new Headers(opts.headers) - - if (body !== null && body !== undefined && !headers.has('Content-Type')) { - const contentType = extractContentType(body) - if (contentType) { - headers.append('Content-Type', contentType) - } - } - - this[INTERNALS] = { - url: opts.url, - status, - statusText: opts.statusText || STATUS_CODES[status], - headers, - counter: opts.counter, - trailer: Promise.resolve(opts.trailer || new Headers()), - } - } - - get trailer () { - return this[INTERNALS].trailer - } - - get url () { - return this[INTERNALS].url || '' - } - - get status () { - return this[INTERNALS].status - } - - get ok () { - return this[INTERNALS].status >= 200 && this[INTERNALS].status < 300 - } - - get redirected () { - return this[INTERNALS].counter > 0 - } - - get statusText () { - return this[INTERNALS].statusText - } - - get headers () { - return this[INTERNALS].headers - } - - clone () { - return new Response(clone(this), { - url: this.url, - status: this.status, - statusText: this.statusText, - headers: this.headers, - ok: this.ok, - redirected: this.redirected, - trailer: this.trailer, - }) - } - - get [Symbol.toStringTag] () { - return 'Response' - } -} - -module.exports = Response - 
-Object.defineProperties(Response.prototype, { - url: { enumerable: true }, - status: { enumerable: true }, - ok: { enumerable: true }, - redirected: { enumerable: true }, - statusText: { enumerable: true }, - headers: { enumerable: true }, - clone: { enumerable: true }, -}) diff --git a/node_modules/tuf-js/node_modules/minipass-fetch/package.json b/node_modules/tuf-js/node_modules/minipass-fetch/package.json deleted file mode 100644 index d491a7fba126d..0000000000000 --- a/node_modules/tuf-js/node_modules/minipass-fetch/package.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - "name": "minipass-fetch", - "version": "3.0.5", - "description": "An implementation of window.fetch in Node.js using Minipass streams", - "license": "MIT", - "main": "lib/index.js", - "scripts": { - "test:tls-fixtures": "./test/fixtures/tls/setup.sh", - "test": "tap", - "snap": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", - "posttest": "npm run lint", - "template-oss-apply": "template-oss-apply --force" - }, - "tap": { - "coverage-map": "map.js", - "check-coverage": true, - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - }, - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", - "@ungap/url-search-params": "^0.2.2", - "abort-controller": "^3.0.0", - "abortcontroller-polyfill": "~1.7.3", - "encoding": "^0.1.13", - "form-data": "^4.0.0", - "nock": "^13.2.4", - "parted": "^0.1.1", - "string-to-arraybuffer": "^1.0.2", - "tap": "^16.0.0" - }, - "dependencies": { - "minipass": "^7.0.3", - "minipass-sized": "^1.0.3", - "minizlib": "^2.1.2" - }, - "optionalDependencies": { - "encoding": "^0.1.13" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/npm/minipass-fetch.git" - }, - "keywords": [ - "fetch", - "minipass", - "node-fetch", - "window.fetch" - ], - "files": [ - "bin/", - "lib/" - ], - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - }, - "author": "GitHub Inc.", - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.22.0", - "publish": "true" - } -} diff --git a/node_modules/tuf-js/node_modules/proc-log/LICENSE b/node_modules/tuf-js/node_modules/proc-log/LICENSE deleted file mode 100644 index 83837797202b7..0000000000000 --- a/node_modules/tuf-js/node_modules/proc-log/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) GitHub, Inc. - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/tuf-js/node_modules/proc-log/lib/index.js b/node_modules/tuf-js/node_modules/proc-log/lib/index.js deleted file mode 100644 index 86d90861078da..0000000000000 --- a/node_modules/tuf-js/node_modules/proc-log/lib/index.js +++ /dev/null @@ -1,153 +0,0 @@ -const META = Symbol('proc-log.meta') -module.exports = { - META: META, - output: { - LEVELS: [ - 'standard', - 'error', - 'buffer', - 'flush', - ], - KEYS: { - standard: 'standard', - error: 'error', - buffer: 'buffer', - flush: 'flush', - }, - standard: function (...args) { - return process.emit('output', 'standard', ...args) - }, - error: function (...args) { - return process.emit('output', 'error', ...args) - }, - buffer: function (...args) { - return process.emit('output', 'buffer', ...args) - }, - flush: function (...args) { - return process.emit('output', 'flush', ...args) - }, - }, - log: { - LEVELS: [ - 'notice', - 'error', - 'warn', - 'info', - 'verbose', - 'http', - 'silly', - 'timing', - 'pause', - 'resume', - ], - KEYS: { - notice: 'notice', - error: 'error', - warn: 'warn', - info: 'info', - verbose: 'verbose', - http: 'http', - silly: 'silly', - timing: 'timing', - pause: 'pause', - resume: 'resume', - }, - error: function (...args) { - return process.emit('log', 'error', ...args) - }, - notice: function (...args) { - return process.emit('log', 'notice', ...args) - }, - warn: function (...args) { - return process.emit('log', 'warn', ...args) - }, - info: function (...args) { - return process.emit('log', 'info', ...args) - }, - verbose: function (...args) { - return process.emit('log', 'verbose', ...args) - }, - http: function (...args) { - return process.emit('log', 'http', ...args) - }, - silly: function (...args) { - return process.emit('log', 'silly', ...args) - }, - timing: function (...args) { - return process.emit('log', 'timing', ...args) - }, - pause: function () { - return process.emit('log', 'pause') - }, - resume: function () { - return process.emit('log', 'resume') - }, - }, - time: { - LEVELS: [ - 'start', - 'end', - ], - KEYS: { - start: 'start', - end: 'end', - }, - start: function (name, fn) { - process.emit('time', 'start', name) - function end () { - return process.emit('time', 'end', name) - } - if (typeof fn === 'function') { - const res = fn() - if (res && res.finally) { - return res.finally(end) - } - end() - return res - } - return end - }, - end: function (name) { - return process.emit('time', 'end', name) - }, - }, - input: { - LEVELS: [ - 'start', - 'end', - 'read', - ], - KEYS: { - start: 'start', - end: 'end', - read: 'read', - }, - start: function (fn) { - process.emit('input', 'start') - function end () { - return process.emit('input', 'end') - } - if (typeof fn === 'function') { - const res = fn() - if (res && res.finally) { - return res.finally(end) - } - end() - return res - } - return end - }, - end: function () { - return process.emit('input', 'end') - }, - read: function (...args) { - let resolve, reject - const promise = new Promise((_resolve, _reject) => { - resolve = _resolve - reject = _reject - }) - process.emit('input', 'read', resolve, reject, ...args) - return promise - }, - }, -} diff --git a/node_modules/tuf-js/node_modules/proc-log/package.json b/node_modules/tuf-js/node_modules/proc-log/package.json deleted file mode 100644 index 4ab89102ecc9b..0000000000000 --- a/node_modules/tuf-js/node_modules/proc-log/package.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "name": "proc-log", - "version": "4.2.0", - "files": [ - "bin/", - "lib/" - ], - "main": 
"lib/index.js", - "description": "just emit 'log' events on the process object", - "repository": { - "type": "git", - "url": "https://github.com/npm/proc-log.git" - }, - "author": "GitHub Inc.", - "license": "ISC", - "scripts": { - "test": "tap", - "snap": "tap", - "posttest": "npm run lint", - "postsnap": "eslint index.js test/*.js --fix", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", - "template-oss-apply": "template-oss-apply --force" - }, - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.21.3", - "tap": "^16.0.1" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.21.3", - "publish": true - }, - "tap": { - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - } -} diff --git a/node_modules/tuf-js/node_modules/ssri/LICENSE.md b/node_modules/tuf-js/node_modules/ssri/LICENSE.md deleted file mode 100644 index e335388869f50..0000000000000 --- a/node_modules/tuf-js/node_modules/ssri/LICENSE.md +++ /dev/null @@ -1,16 +0,0 @@ -ISC License - -Copyright 2021 (c) npm, Inc. - -Permission to use, copy, modify, and/or distribute this software for -any purpose with or without fee is hereby granted, provided that the -above copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS -ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE -COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE -USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/tuf-js/node_modules/ssri/lib/index.js b/node_modules/tuf-js/node_modules/ssri/lib/index.js deleted file mode 100644 index 7d749ed480fb9..0000000000000 --- a/node_modules/tuf-js/node_modules/ssri/lib/index.js +++ /dev/null @@ -1,580 +0,0 @@ -'use strict' - -const crypto = require('crypto') -const { Minipass } = require('minipass') - -const SPEC_ALGORITHMS = ['sha512', 'sha384', 'sha256'] -const DEFAULT_ALGORITHMS = ['sha512'] - -// TODO: this should really be a hardcoded list of algorithms we support, -// rather than [a-z0-9]. -const BASE64_REGEX = /^[a-z0-9+/]+(?:=?=?)$/i -const SRI_REGEX = /^([a-z0-9]+)-([^?]+)([?\S*]*)$/ -const STRICT_SRI_REGEX = /^([a-z0-9]+)-([A-Za-z0-9+/=]{44,88})(\?[\x21-\x7E]*)?$/ -const VCHAR_REGEX = /^[\x21-\x7E]+$/ - -const getOptString = options => options?.length ? `?${options.join('?')}` : '' - -class IntegrityStream extends Minipass { - #emittedIntegrity - #emittedSize - #emittedVerified - - constructor (opts) { - super() - this.size = 0 - this.opts = opts - - // may be overridden later, but set now for class consistency - this.#getOptions() - - // options used for calculating stream. can't be changed. 
- if (opts?.algorithms) { - this.algorithms = [...opts.algorithms] - } else { - this.algorithms = [...DEFAULT_ALGORITHMS] - } - if (this.algorithm !== null && !this.algorithms.includes(this.algorithm)) { - this.algorithms.push(this.algorithm) - } - - this.hashes = this.algorithms.map(crypto.createHash) - } - - #getOptions () { - // For verification - this.sri = this.opts?.integrity ? parse(this.opts?.integrity, this.opts) : null - this.expectedSize = this.opts?.size - - if (!this.sri) { - this.algorithm = null - } else if (this.sri.isHash) { - this.goodSri = true - this.algorithm = this.sri.algorithm - } else { - this.goodSri = !this.sri.isEmpty() - this.algorithm = this.sri.pickAlgorithm(this.opts) - } - - this.digests = this.goodSri ? this.sri[this.algorithm] : null - this.optString = getOptString(this.opts?.options) - } - - on (ev, handler) { - if (ev === 'size' && this.#emittedSize) { - return handler(this.#emittedSize) - } - - if (ev === 'integrity' && this.#emittedIntegrity) { - return handler(this.#emittedIntegrity) - } - - if (ev === 'verified' && this.#emittedVerified) { - return handler(this.#emittedVerified) - } - - return super.on(ev, handler) - } - - emit (ev, data) { - if (ev === 'end') { - this.#onEnd() - } - return super.emit(ev, data) - } - - write (data) { - this.size += data.length - this.hashes.forEach(h => h.update(data)) - return super.write(data) - } - - #onEnd () { - if (!this.goodSri) { - this.#getOptions() - } - const newSri = parse(this.hashes.map((h, i) => { - return `${this.algorithms[i]}-${h.digest('base64')}${this.optString}` - }).join(' '), this.opts) - // Integrity verification mode - const match = this.goodSri && newSri.match(this.sri, this.opts) - if (typeof this.expectedSize === 'number' && this.size !== this.expectedSize) { - /* eslint-disable-next-line max-len */ - const err = new Error(`stream size mismatch when checking ${this.sri}.\n Wanted: ${this.expectedSize}\n Found: ${this.size}`) - err.code = 'EBADSIZE' - err.found = this.size - err.expected = this.expectedSize - err.sri = this.sri - this.emit('error', err) - } else if (this.sri && !match) { - /* eslint-disable-next-line max-len */ - const err = new Error(`${this.sri} integrity checksum failed when using ${this.algorithm}: wanted ${this.digests} but got ${newSri}. (${this.size} bytes)`) - err.code = 'EINTEGRITY' - err.found = newSri - err.expected = this.digests - err.algorithm = this.algorithm - err.sri = this.sri - this.emit('error', err) - } else { - this.#emittedSize = this.size - this.emit('size', this.size) - this.#emittedIntegrity = newSri - this.emit('integrity', newSri) - if (match) { - this.#emittedVerified = match - this.emit('verified', match) - } - } - } -} - -class Hash { - get isHash () { - return true - } - - constructor (hash, opts) { - const strict = opts?.strict - this.source = hash.trim() - - // set default values so that we make V8 happy to - // always see a familiar object template. - this.digest = '' - this.algorithm = '' - this.options = [] - - // 3.1. Integrity metadata (called "Hash" by ssri) - // https://w3c.github.io/webappsec-subresource-integrity/#integrity-metadata-description - const match = this.source.match( - strict - ? 
STRICT_SRI_REGEX - : SRI_REGEX - ) - if (!match) { - return - } - if (strict && !SPEC_ALGORITHMS.includes(match[1])) { - return - } - this.algorithm = match[1] - this.digest = match[2] - - const rawOpts = match[3] - if (rawOpts) { - this.options = rawOpts.slice(1).split('?') - } - } - - hexDigest () { - return this.digest && Buffer.from(this.digest, 'base64').toString('hex') - } - - toJSON () { - return this.toString() - } - - match (integrity, opts) { - const other = parse(integrity, opts) - if (!other) { - return false - } - if (other.isIntegrity) { - const algo = other.pickAlgorithm(opts, [this.algorithm]) - - if (!algo) { - return false - } - - const foundHash = other[algo].find(hash => hash.digest === this.digest) - - if (foundHash) { - return foundHash - } - - return false - } - return other.digest === this.digest ? other : false - } - - toString (opts) { - if (opts?.strict) { - // Strict mode enforces the standard as close to the foot of the - // letter as it can. - if (!( - // The spec has very restricted productions for algorithms. - // https://www.w3.org/TR/CSP2/#source-list-syntax - SPEC_ALGORITHMS.includes(this.algorithm) && - // Usually, if someone insists on using a "different" base64, we - // leave it as-is, since there's multiple standards, and the - // specified is not a URL-safe variant. - // https://www.w3.org/TR/CSP2/#base64_value - this.digest.match(BASE64_REGEX) && - // Option syntax is strictly visual chars. - // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-option-expression - // https://tools.ietf.org/html/rfc5234#appendix-B.1 - this.options.every(opt => opt.match(VCHAR_REGEX)) - )) { - return '' - } - } - return `${this.algorithm}-${this.digest}${getOptString(this.options)}` - } -} - -function integrityHashToString (toString, sep, opts, hashes) { - const toStringIsNotEmpty = toString !== '' - - let shouldAddFirstSep = false - let complement = '' - - const lastIndex = hashes.length - 1 - - for (let i = 0; i < lastIndex; i++) { - const hashString = Hash.prototype.toString.call(hashes[i], opts) - - if (hashString) { - shouldAddFirstSep = true - - complement += hashString - complement += sep - } - } - - const finalHashString = Hash.prototype.toString.call(hashes[lastIndex], opts) - - if (finalHashString) { - shouldAddFirstSep = true - complement += finalHashString - } - - if (toStringIsNotEmpty && shouldAddFirstSep) { - return toString + sep + complement - } - - return toString + complement -} - -class Integrity { - get isIntegrity () { - return true - } - - toJSON () { - return this.toString() - } - - isEmpty () { - return Object.keys(this).length === 0 - } - - toString (opts) { - let sep = opts?.sep || ' ' - let toString = '' - - if (opts?.strict) { - // Entries must be separated by whitespace, according to spec. - sep = sep.replace(/\S+/g, ' ') - - for (const hash of SPEC_ALGORITHMS) { - if (this[hash]) { - toString = integrityHashToString(toString, sep, opts, this[hash]) - } - } - } else { - for (const hash of Object.keys(this)) { - toString = integrityHashToString(toString, sep, opts, this[hash]) - } - } - - return toString - } - - concat (integrity, opts) { - const other = typeof integrity === 'string' - ? integrity - : stringify(integrity, opts) - return parse(`${this.toString(opts)} ${other}`, opts) - } - - hexDigest () { - return parse(this, { single: true }).hexDigest() - } - - // add additional hashes to an integrity value, but prevent - // *changing* an existing integrity hash. 
- merge (integrity, opts) { - const other = parse(integrity, opts) - for (const algo in other) { - if (this[algo]) { - if (!this[algo].find(hash => - other[algo].find(otherhash => - hash.digest === otherhash.digest))) { - throw new Error('hashes do not match, cannot update integrity') - } - } else { - this[algo] = other[algo] - } - } - } - - match (integrity, opts) { - const other = parse(integrity, opts) - if (!other) { - return false - } - const algo = other.pickAlgorithm(opts, Object.keys(this)) - return ( - !!algo && - this[algo] && - other[algo] && - this[algo].find(hash => - other[algo].find(otherhash => - hash.digest === otherhash.digest - ) - ) - ) || false - } - - // Pick the highest priority algorithm present, optionally also limited to a - // set of hashes found in another integrity. When limiting it may return - // nothing. - pickAlgorithm (opts, hashes) { - const pickAlgorithm = opts?.pickAlgorithm || getPrioritizedHash - const keys = Object.keys(this).filter(k => { - if (hashes?.length) { - return hashes.includes(k) - } - return true - }) - if (keys.length) { - return keys.reduce((acc, algo) => pickAlgorithm(acc, algo) || acc) - } - // no intersection between this and hashes, - return null - } -} - -module.exports.parse = parse -function parse (sri, opts) { - if (!sri) { - return null - } - if (typeof sri === 'string') { - return _parse(sri, opts) - } else if (sri.algorithm && sri.digest) { - const fullSri = new Integrity() - fullSri[sri.algorithm] = [sri] - return _parse(stringify(fullSri, opts), opts) - } else { - return _parse(stringify(sri, opts), opts) - } -} - -function _parse (integrity, opts) { - // 3.4.3. Parse metadata - // https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata - if (opts?.single) { - return new Hash(integrity, opts) - } - const hashes = integrity.trim().split(/\s+/).reduce((acc, string) => { - const hash = new Hash(string, opts) - if (hash.algorithm && hash.digest) { - const algo = hash.algorithm - if (!acc[algo]) { - acc[algo] = [] - } - acc[algo].push(hash) - } - return acc - }, new Integrity()) - return hashes.isEmpty() ? null : hashes -} - -module.exports.stringify = stringify -function stringify (obj, opts) { - if (obj.algorithm && obj.digest) { - return Hash.prototype.toString.call(obj, opts) - } else if (typeof obj === 'string') { - return stringify(parse(obj, opts), opts) - } else { - return Integrity.prototype.toString.call(obj, opts) - } -} - -module.exports.fromHex = fromHex -function fromHex (hexDigest, algorithm, opts) { - const optString = getOptString(opts?.options) - return parse( - `${algorithm}-${ - Buffer.from(hexDigest, 'hex').toString('base64') - }${optString}`, opts - ) -} - -module.exports.fromData = fromData -function fromData (data, opts) { - const algorithms = opts?.algorithms || [...DEFAULT_ALGORITHMS] - const optString = getOptString(opts?.options) - return algorithms.reduce((acc, algo) => { - const digest = crypto.createHash(algo).update(data).digest('base64') - const hash = new Hash( - `${algo}-${digest}${optString}`, - opts - ) - /* istanbul ignore else - it would be VERY strange if the string we - * just calculated with an algo did not have an algo or digest. 
- */ - if (hash.algorithm && hash.digest) { - const hashAlgo = hash.algorithm - if (!acc[hashAlgo]) { - acc[hashAlgo] = [] - } - acc[hashAlgo].push(hash) - } - return acc - }, new Integrity()) -} - -module.exports.fromStream = fromStream -function fromStream (stream, opts) { - const istream = integrityStream(opts) - return new Promise((resolve, reject) => { - stream.pipe(istream) - stream.on('error', reject) - istream.on('error', reject) - let sri - istream.on('integrity', s => { - sri = s - }) - istream.on('end', () => resolve(sri)) - istream.resume() - }) -} - -module.exports.checkData = checkData -function checkData (data, sri, opts) { - sri = parse(sri, opts) - if (!sri || !Object.keys(sri).length) { - if (opts?.error) { - throw Object.assign( - new Error('No valid integrity hashes to check against'), { - code: 'EINTEGRITY', - } - ) - } else { - return false - } - } - const algorithm = sri.pickAlgorithm(opts) - const digest = crypto.createHash(algorithm).update(data).digest('base64') - const newSri = parse({ algorithm, digest }) - const match = newSri.match(sri, opts) - opts = opts || {} - if (match || !(opts.error)) { - return match - } else if (typeof opts.size === 'number' && (data.length !== opts.size)) { - /* eslint-disable-next-line max-len */ - const err = new Error(`data size mismatch when checking ${sri}.\n Wanted: ${opts.size}\n Found: ${data.length}`) - err.code = 'EBADSIZE' - err.found = data.length - err.expected = opts.size - err.sri = sri - throw err - } else { - /* eslint-disable-next-line max-len */ - const err = new Error(`Integrity checksum failed when using ${algorithm}: Wanted ${sri}, but got ${newSri}. (${data.length} bytes)`) - err.code = 'EINTEGRITY' - err.found = newSri - err.expected = sri - err.algorithm = algorithm - err.sri = sri - throw err - } -} - -module.exports.checkStream = checkStream -function checkStream (stream, sri, opts) { - opts = opts || Object.create(null) - opts.integrity = sri - sri = parse(sri, opts) - if (!sri || !Object.keys(sri).length) { - return Promise.reject(Object.assign( - new Error('No valid integrity hashes to check against'), { - code: 'EINTEGRITY', - } - )) - } - const checker = integrityStream(opts) - return new Promise((resolve, reject) => { - stream.pipe(checker) - stream.on('error', reject) - checker.on('error', reject) - let verified - checker.on('verified', s => { - verified = s - }) - checker.on('end', () => resolve(verified)) - checker.resume() - }) -} - -module.exports.integrityStream = integrityStream -function integrityStream (opts = Object.create(null)) { - return new IntegrityStream(opts) -} - -module.exports.create = createIntegrity -function createIntegrity (opts) { - const algorithms = opts?.algorithms || [...DEFAULT_ALGORITHMS] - const optString = getOptString(opts?.options) - - const hashes = algorithms.map(crypto.createHash) - - return { - update: function (chunk, enc) { - hashes.forEach(h => h.update(chunk, enc)) - return this - }, - digest: function () { - const integrity = algorithms.reduce((acc, algo) => { - const digest = hashes.shift().digest('base64') - const hash = new Hash( - `${algo}-${digest}${optString}`, - opts - ) - /* istanbul ignore else - it would be VERY strange if the hash we - * just calculated with an algo did not have an algo or digest. 
- */ - if (hash.algorithm && hash.digest) { - const hashAlgo = hash.algorithm - if (!acc[hashAlgo]) { - acc[hashAlgo] = [] - } - acc[hashAlgo].push(hash) - } - return acc - }, new Integrity()) - - return integrity - }, - } -} - -const NODE_HASHES = crypto.getHashes() - -// This is a Best Effort™ at a reasonable priority for hash algos -const DEFAULT_PRIORITY = [ - 'md5', 'whirlpool', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512', - // TODO - it's unclear _which_ of these Node will actually use as its name - // for the algorithm, so we guesswork it based on the OpenSSL names. - 'sha3', - 'sha3-256', 'sha3-384', 'sha3-512', - 'sha3_256', 'sha3_384', 'sha3_512', -].filter(algo => NODE_HASHES.includes(algo)) - -function getPrioritizedHash (algo1, algo2) { - /* eslint-disable-next-line max-len */ - return DEFAULT_PRIORITY.indexOf(algo1.toLowerCase()) >= DEFAULT_PRIORITY.indexOf(algo2.toLowerCase()) - ? algo1 - : algo2 -} diff --git a/node_modules/tuf-js/node_modules/ssri/package.json b/node_modules/tuf-js/node_modules/ssri/package.json deleted file mode 100644 index 28395414e4643..0000000000000 --- a/node_modules/tuf-js/node_modules/ssri/package.json +++ /dev/null @@ -1,65 +0,0 @@ -{ - "name": "ssri", - "version": "10.0.6", - "description": "Standard Subresource Integrity library -- parses, serializes, generates, and verifies integrity metadata according to the SRI spec.", - "main": "lib/index.js", - "files": [ - "bin/", - "lib/" - ], - "scripts": { - "prerelease": "npm t", - "postrelease": "npm publish", - "posttest": "npm run lint", - "test": "tap", - "coverage": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "snap": "tap" - }, - "tap": { - "check-coverage": true, - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - }, - "repository": { - "type": "git", - "url": "git+https://github.com/npm/ssri.git" - }, - "keywords": [ - "w3c", - "web", - "security", - "integrity", - "checksum", - "hashing", - "subresource integrity", - "sri", - "sri hash", - "sri string", - "sri generator", - "html" - ], - "author": "GitHub Inc.", - "license": "ISC", - "dependencies": { - "minipass": "^7.0.3" - }, - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", - "tap": "^16.0.1" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.22.0", - "publish": "true" - } -} diff --git a/node_modules/tuf-js/node_modules/unique-filename/LICENSE b/node_modules/tuf-js/node_modules/unique-filename/LICENSE deleted file mode 100644 index 69619c125ea7e..0000000000000 --- a/node_modules/tuf-js/node_modules/unique-filename/LICENSE +++ /dev/null @@ -1,5 +0,0 @@ -Copyright npm, Inc - -Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/tuf-js/node_modules/unique-filename/lib/index.js b/node_modules/tuf-js/node_modules/unique-filename/lib/index.js deleted file mode 100644 index d067d2e709809..0000000000000 --- a/node_modules/tuf-js/node_modules/unique-filename/lib/index.js +++ /dev/null @@ -1,7 +0,0 @@ -var path = require('path') - -var uniqueSlug = require('unique-slug') - -module.exports = function (filepath, prefix, uniq) { - return path.join(filepath, (prefix ? prefix + '-' : '') + uniqueSlug(uniq)) -} diff --git a/node_modules/tuf-js/node_modules/unique-filename/package.json b/node_modules/tuf-js/node_modules/unique-filename/package.json deleted file mode 100644 index b2fbf0666489a..0000000000000 --- a/node_modules/tuf-js/node_modules/unique-filename/package.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "name": "unique-filename", - "version": "3.0.0", - "description": "Generate a unique filename for use in temporary directories or caches.", - "main": "lib/index.js", - "scripts": { - "test": "tap", - "lint": "eslint \"**/*.js\"", - "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "snap": "tap", - "posttest": "npm run lint" - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/unique-filename.git" - }, - "keywords": [], - "author": "GitHub Inc.", - "license": "ISC", - "bugs": { - "url": "https://github.com/iarna/unique-filename/issues" - }, - "homepage": "https://github.com/iarna/unique-filename", - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.5.1", - "tap": "^16.3.0" - }, - "dependencies": { - "unique-slug": "^4.0.0" - }, - "files": [ - "bin/", - "lib/" - ], - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.5.1" - }, - "tap": { - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - } -} diff --git a/node_modules/tuf-js/node_modules/unique-slug/LICENSE b/node_modules/tuf-js/node_modules/unique-slug/LICENSE deleted file mode 100644 index 7953647e7760b..0000000000000 --- a/node_modules/tuf-js/node_modules/unique-slug/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright npm, Inc - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/tuf-js/node_modules/unique-slug/lib/index.js b/node_modules/tuf-js/node_modules/unique-slug/lib/index.js deleted file mode 100644 index 1bac84d95d730..0000000000000 --- a/node_modules/tuf-js/node_modules/unique-slug/lib/index.js +++ /dev/null @@ -1,11 +0,0 @@ -'use strict' -var MurmurHash3 = require('imurmurhash') - -module.exports = function (uniq) { - if (uniq) { - var hash = new MurmurHash3(uniq) - return ('00000000' + hash.result().toString(16)).slice(-8) - } else { - return (Math.random().toString(16) + '0000000').slice(2, 10) - } -} diff --git a/node_modules/tuf-js/node_modules/unique-slug/package.json b/node_modules/tuf-js/node_modules/unique-slug/package.json deleted file mode 100644 index 33732cdbb4285..0000000000000 --- a/node_modules/tuf-js/node_modules/unique-slug/package.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "name": "unique-slug", - "version": "4.0.0", - "description": "Generate a unique character string suitible for use in files and URLs.", - "main": "lib/index.js", - "scripts": { - "test": "tap", - "lint": "eslint \"**/*.js\"", - "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "snap": "tap", - "posttest": "npm run lint" - }, - "keywords": [], - "author": "GitHub Inc.", - "license": "ISC", - "devDependencies": { - "@npmcli/eslint-config": "^3.1.0", - "@npmcli/template-oss": "4.5.1", - "tap": "^16.3.0" - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/unique-slug.git" - }, - "dependencies": { - "imurmurhash": "^0.1.4" - }, - "files": [ - "bin/", - "lib/" - ], - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.5.1" - }, - "tap": { - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - } -} diff --git a/node_modules/tuf-js/package.json b/node_modules/tuf-js/package.json index 9280719230d9a..e79a3d45f3f06 100644 --- a/node_modules/tuf-js/package.json +++ b/node_modules/tuf-js/package.json @@ -1,12 +1,12 @@ { "name": "tuf-js", - "version": "2.2.1", + "version": "3.0.1", "description": "JavaScript implementation of The Update Framework (TUF)", "main": "dist/index.js", "types": "dist/index.d.ts", "scripts": { "build": "tsc --build", - "clean": "rm -rf dist", + "clean": "rm -rf dist && rm tsconfig.tsbuildinfo", "test": "jest" }, "repository": { @@ -28,16 +28,16 @@ }, "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/client#readme", "devDependencies": { - "@tufjs/repo-mock": "2.0.1", + "@tufjs/repo-mock": "3.0.1", "@types/debug": "^4.1.12", "@types/make-fetch-happen": "^10.0.4" }, "dependencies": { - "@tufjs/models": "2.0.1", - "debug": "^4.3.4", - "make-fetch-happen": "^13.0.1" + "@tufjs/models": "3.0.1", + "debug": "^4.3.6", + "make-fetch-happen": "^14.0.1" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" } } diff --git a/package-lock.json b/package-lock.json index b748a0c968d46..0042083f10ce7 100644 --- a/package-lock.json +++ b/package-lock.json @@ -95,7 +95,7 @@ "@npmcli/promise-spawn": "^8.0.2", "@npmcli/redact": "^3.0.0", "@npmcli/run-script": "^9.0.1", - "@sigstore/tuf": "^2.3.4", + "@sigstore/tuf": "^3.0.0", "abbrev": "^3.0.0", "archy": "~1.0.0", "cacache": "^19.0.1", @@ -1686,84 +1686,6 @@ "node": "^18.17.0 || >=20.5.0" } }, - "node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/bundle": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-3.0.0.tgz", - "integrity": "sha512-XDUYX56iMPAn/cdgh/DTJxz5RWmqKV4pwvUAEKEWJl+HzKdCd/24wUa9JYNMlDSCb7SUHAdtksxYX779Nne/Zg==", - "license": "Apache-2.0", - "dependencies": { - "@sigstore/protobuf-specs": "^0.3.2" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/core": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@sigstore/core/-/core-2.0.0.tgz", - "integrity": "sha512-nYxaSb/MtlSI+JWcwTHQxyNmWeWrUXJJ/G4liLrGG7+tS4vAz6LF3xRXqLH6wPIVUoZQel2Fs4ddLx4NCpiIYg==", - "license": "Apache-2.0", - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/sign": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-3.0.0.tgz", - "integrity": "sha512-UjhDMQOkyDoktpXoc5YPJpJK6IooF2gayAr5LvXI4EL7O0vd58okgfRcxuaH+YTdhvb5aa1Q9f+WJ0c2sVuYIw==", - "license": "Apache-2.0", - "dependencies": { - "@sigstore/bundle": "^3.0.0", - "@sigstore/core": "^2.0.0", - "@sigstore/protobuf-specs": "^0.3.2", - "make-fetch-happen": "^14.0.1", - "proc-log": "^5.0.0", - "promise-retry": "^2.0.1" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/tuf": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-3.0.0.tgz", - "integrity": "sha512-9Xxy/8U5OFJu7s+OsHzI96IX/OzjF/zj0BSSaWhgJgTqtlBhQIV2xdrQI5qxLD7+CWWDepadnXAxzaZ3u9cvRw==", - "license": "Apache-2.0", - "dependencies": { - "@sigstore/protobuf-specs": "^0.3.2", - "tuf-js": "^3.0.1" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - 
"node_modules/@npmcli/metavuln-calculator/node_modules/@sigstore/verify": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@sigstore/verify/-/verify-2.0.0.tgz", - "integrity": "sha512-Ggtq2GsJuxFNUvQzLoXqRwS4ceRfLAJnrIHUDrzAD0GgnOhwujJkKkxM/s5Bako07c3WtAs/sZo5PJq7VHjeDg==", - "license": "Apache-2.0", - "dependencies": { - "@sigstore/bundle": "^3.0.0", - "@sigstore/core": "^2.0.0", - "@sigstore/protobuf-specs": "^0.3.2" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/@npmcli/metavuln-calculator/node_modules/@tufjs/models": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-3.0.1.tgz", - "integrity": "sha512-UUYHISyhCU3ZgN8yaear3cGATHb3SMuKHsQ/nVbHXcmnBf+LzQ/cQfhNG+rfaSHgqGKNEm2cOCLVLELStUQ1JA==", - "license": "MIT", - "dependencies": { - "@tufjs/canonical-json": "2.0.0", - "minimatch": "^9.0.5" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, "node_modules/@npmcli/metavuln-calculator/node_modules/pacote": { "version": "20.0.0", "resolved": "https://registry.npmjs.org/pacote/-/pacote-20.0.0.tgz", @@ -1795,37 +1717,6 @@ "node": "^18.17.0 || >=20.5.0" } }, - "node_modules/@npmcli/metavuln-calculator/node_modules/sigstore": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-3.0.0.tgz", - "integrity": "sha512-PHMifhh3EN4loMcHCz6l3v/luzgT3za+9f8subGgeMNjbJjzH4Ij/YoX3Gvu+kaouJRIlVdTHHCREADYf+ZteA==", - "license": "Apache-2.0", - "dependencies": { - "@sigstore/bundle": "^3.0.0", - "@sigstore/core": "^2.0.0", - "@sigstore/protobuf-specs": "^0.3.2", - "@sigstore/sign": "^3.0.0", - "@sigstore/tuf": "^3.0.0", - "@sigstore/verify": "^2.0.0" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/@npmcli/metavuln-calculator/node_modules/tuf-js": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-3.0.1.tgz", - "integrity": "sha512-+68OP1ZzSF84rTckf3FA95vJ1Zlx/uaXyiiKyPd1pA4rZNkpEvDAKmsu1xUSmbF/chCRYgZ6UZkDwC7PmzmAyA==", - "license": "MIT", - "dependencies": { - "@tufjs/models": "3.0.1", - "debug": "^4.3.6", - "make-fetch-happen": "^14.0.1" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, "node_modules/@npmcli/mock-globals": { "resolved": "mock-globals", "link": true @@ -2223,6 +2114,20 @@ "node": "^16.14.0 || >=18.0.0" } }, + "node_modules/@npmcli/template-oss/node_modules/@sigstore/tuf": { + "version": "2.3.4", + "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-2.3.4.tgz", + "integrity": "sha512-44vtsveTPUpqhm9NCrbU8CWLe3Vck2HO1PNLw7RIajbB7xhtn5RBPm1VNSCMwqGYHhDsBJG8gDF0q4lgydsJvw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@sigstore/protobuf-specs": "^0.3.2", + "tuf-js": "^2.2.1" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, "node_modules/@npmcli/template-oss/node_modules/abbrev": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-2.0.0.tgz", @@ -2604,6 +2509,24 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, + "node_modules/@npmcli/template-oss/node_modules/sigstore": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-2.3.1.tgz", + "integrity": "sha512-8G+/XDU8wNsJOQS5ysDVO0Etg9/2uA5gR9l4ZwijjlwxBcrU6RPfwi2+jJmbP+Ap1Hlp/nVAaEO4Fj22/SL2gQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@sigstore/bundle": "^2.3.2", + "@sigstore/core": "^1.0.0", + "@sigstore/protobuf-specs": "^0.3.2", + "@sigstore/sign": "^2.3.2", + "@sigstore/tuf": "^2.3.4", + "@sigstore/verify": "^1.2.1" + 
}, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, "node_modules/@npmcli/template-oss/node_modules/ssri": { "version": "10.0.6", "resolved": "https://registry.npmjs.org/ssri/-/ssri-10.0.6.tgz", @@ -2617,6 +2540,21 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, + "node_modules/@npmcli/template-oss/node_modules/tuf-js": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-2.2.1.tgz", + "integrity": "sha512-GwIJau9XaA8nLVbUXsN3IlFi7WmQ48gBUrl3FTkkL/XLu/POhBzfmX9hd33FNMX1qAsfl6ozO1iMmW9NC8YniA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@tufjs/models": "2.0.1", + "debug": "^4.3.4", + "make-fetch-happen": "^13.0.1" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, "node_modules/@npmcli/template-oss/node_modules/unique-filename": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-3.0.0.tgz", @@ -3076,17 +3014,17 @@ } }, "node_modules/@sigstore/tuf": { - "version": "2.3.4", - "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-2.3.4.tgz", - "integrity": "sha512-44vtsveTPUpqhm9NCrbU8CWLe3Vck2HO1PNLw7RIajbB7xhtn5RBPm1VNSCMwqGYHhDsBJG8gDF0q4lgydsJvw==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-3.0.0.tgz", + "integrity": "sha512-9Xxy/8U5OFJu7s+OsHzI96IX/OzjF/zj0BSSaWhgJgTqtlBhQIV2xdrQI5qxLD7+CWWDepadnXAxzaZ3u9cvRw==", "inBundle": true, "license": "Apache-2.0", "dependencies": { "@sigstore/protobuf-specs": "^0.3.2", - "tuf-js": "^2.2.1" + "tuf-js": "^3.0.1" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" } }, "node_modules/@sigstore/verify": { @@ -3118,7 +3056,7 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-2.0.1.tgz", "integrity": "sha512-92F7/SFyufn4DXsha9+QfKnN03JGqtMFMXgSHbZOo8JG59WkTni7UzAouNQDf7AuP9OAMxVOPQcqG3sB7w+kkg==", - "inBundle": true, + "dev": true, "license": "MIT", "dependencies": { "@tufjs/canonical-json": "2.0.0", @@ -11520,202 +11458,85 @@ "node": "^18.17.0 || >=20.5.0" } }, - "node_modules/pacote/node_modules/@sigstore/bundle": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-3.0.0.tgz", - "integrity": "sha512-XDUYX56iMPAn/cdgh/DTJxz5RWmqKV4pwvUAEKEWJl+HzKdCd/24wUa9JYNMlDSCb7SUHAdtksxYX779Nne/Zg==", - "inBundle": true, - "license": "Apache-2.0", + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "license": "MIT", "dependencies": { - "@sigstore/protobuf-specs": "^0.3.2" + "callsites": "^3.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": ">=6" } }, - "node_modules/pacote/node_modules/@sigstore/core": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@sigstore/core/-/core-2.0.0.tgz", - "integrity": "sha512-nYxaSb/MtlSI+JWcwTHQxyNmWeWrUXJJ/G4liLrGG7+tS4vAz6LF3xRXqLH6wPIVUoZQel2Fs4ddLx4NCpiIYg==", + "node_modules/parse-conflict-json": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/parse-conflict-json/-/parse-conflict-json-4.0.0.tgz", + "integrity": "sha512-37CN2VtcuvKgHUs8+0b1uJeEsbGn61GRHz469C94P5xiOoqpDYJYwjg4RY9Vmz39WyZAVkR5++nbJwLMIgOCnQ==", "inBundle": true, - "license": "Apache-2.0", + "license": "ISC", + "dependencies": { + "json-parse-even-better-errors": "^4.0.0", + "just-diff": "^6.0.0", + "just-diff-apply": "^5.2.0" + }, "engines": 
{ "node": "^18.17.0 || >=20.5.0" } }, - "node_modules/pacote/node_modules/@sigstore/sign": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-3.0.0.tgz", - "integrity": "sha512-UjhDMQOkyDoktpXoc5YPJpJK6IooF2gayAr5LvXI4EL7O0vd58okgfRcxuaH+YTdhvb5aa1Q9f+WJ0c2sVuYIw==", - "inBundle": true, - "license": "Apache-2.0", + "node_modules/parse-diff": { + "version": "0.11.1", + "resolved": "https://registry.npmjs.org/parse-diff/-/parse-diff-0.11.1.tgz", + "integrity": "sha512-Oq4j8LAOPOcssanQkIjxosjATBIEJhCxMCxPhMu+Ci4wdNmAEdx0O+a7gzbR2PyKXgKPvRLIN5g224+dJAsKHA==", + "dev": true, + "license": "MIT" + }, + "node_modules/parse-github-repo-url": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/parse-github-repo-url/-/parse-github-repo-url-1.4.1.tgz", + "integrity": "sha512-bSWyzBKqcSL4RrncTpGsEKoJ7H8a4L3++ifTAbTFeMHyq2wRV+42DGmQcHIrJIvdcacjIOxEuKH/w4tthF17gg==", + "dev": true, + "license": "MIT" + }, + "node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "license": "MIT", "dependencies": { - "@sigstore/bundle": "^3.0.0", - "@sigstore/core": "^2.0.0", - "@sigstore/protobuf-specs": "^0.3.2", - "make-fetch-happen": "^14.0.1", - "proc-log": "^5.0.0", - "promise-retry": "^2.0.1" + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/pacote/node_modules/@sigstore/tuf": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-3.0.0.tgz", - "integrity": "sha512-9Xxy/8U5OFJu7s+OsHzI96IX/OzjF/zj0BSSaWhgJgTqtlBhQIV2xdrQI5qxLD7+CWWDepadnXAxzaZ3u9cvRw==", - "inBundle": true, - "license": "Apache-2.0", + "node_modules/parse-json/node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true, + "license": "MIT" + }, + "node_modules/parse5": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.1.2.tgz", + "integrity": "sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==", + "dev": true, + "license": "MIT", "dependencies": { - "@sigstore/protobuf-specs": "^0.3.2", - "tuf-js": "^3.0.1" + "entities": "^4.4.0" }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/pacote/node_modules/@sigstore/verify": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@sigstore/verify/-/verify-2.0.0.tgz", - "integrity": "sha512-Ggtq2GsJuxFNUvQzLoXqRwS4ceRfLAJnrIHUDrzAD0GgnOhwujJkKkxM/s5Bako07c3WtAs/sZo5PJq7VHjeDg==", - "inBundle": true, - "license": "Apache-2.0", - "dependencies": { - "@sigstore/bundle": "^3.0.0", - "@sigstore/core": "^2.0.0", - "@sigstore/protobuf-specs": "^0.3.2" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/pacote/node_modules/@tufjs/models": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-3.0.1.tgz", - "integrity": 
"sha512-UUYHISyhCU3ZgN8yaear3cGATHb3SMuKHsQ/nVbHXcmnBf+LzQ/cQfhNG+rfaSHgqGKNEm2cOCLVLELStUQ1JA==", - "inBundle": true, - "license": "MIT", - "dependencies": { - "@tufjs/canonical-json": "2.0.0", - "minimatch": "^9.0.5" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/pacote/node_modules/sigstore": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-3.0.0.tgz", - "integrity": "sha512-PHMifhh3EN4loMcHCz6l3v/luzgT3za+9f8subGgeMNjbJjzH4Ij/YoX3Gvu+kaouJRIlVdTHHCREADYf+ZteA==", - "inBundle": true, - "license": "Apache-2.0", - "dependencies": { - "@sigstore/bundle": "^3.0.0", - "@sigstore/core": "^2.0.0", - "@sigstore/protobuf-specs": "^0.3.2", - "@sigstore/sign": "^3.0.0", - "@sigstore/tuf": "^3.0.0", - "@sigstore/verify": "^2.0.0" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/pacote/node_modules/tuf-js": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-3.0.1.tgz", - "integrity": "sha512-+68OP1ZzSF84rTckf3FA95vJ1Zlx/uaXyiiKyPd1pA4rZNkpEvDAKmsu1xUSmbF/chCRYgZ6UZkDwC7PmzmAyA==", - "inBundle": true, - "license": "MIT", - "dependencies": { - "@tufjs/models": "3.0.1", - "debug": "^4.3.6", - "make-fetch-happen": "^14.0.1" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/parent-module": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", - "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", - "dev": true, - "license": "MIT", - "dependencies": { - "callsites": "^3.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/parse-conflict-json": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/parse-conflict-json/-/parse-conflict-json-4.0.0.tgz", - "integrity": "sha512-37CN2VtcuvKgHUs8+0b1uJeEsbGn61GRHz469C94P5xiOoqpDYJYwjg4RY9Vmz39WyZAVkR5++nbJwLMIgOCnQ==", - "inBundle": true, - "license": "ISC", - "dependencies": { - "json-parse-even-better-errors": "^4.0.0", - "just-diff": "^6.0.0", - "just-diff-apply": "^5.2.0" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/parse-diff": { - "version": "0.11.1", - "resolved": "https://registry.npmjs.org/parse-diff/-/parse-diff-0.11.1.tgz", - "integrity": "sha512-Oq4j8LAOPOcssanQkIjxosjATBIEJhCxMCxPhMu+Ci4wdNmAEdx0O+a7gzbR2PyKXgKPvRLIN5g224+dJAsKHA==", - "dev": true, - "license": "MIT" - }, - "node_modules/parse-github-repo-url": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/parse-github-repo-url/-/parse-github-repo-url-1.4.1.tgz", - "integrity": "sha512-bSWyzBKqcSL4RrncTpGsEKoJ7H8a4L3++ifTAbTFeMHyq2wRV+42DGmQcHIrJIvdcacjIOxEuKH/w4tthF17gg==", - "dev": true, - "license": "MIT" - }, - "node_modules/parse-json": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", - "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.0.0", - "error-ex": "^1.3.1", - "json-parse-even-better-errors": "^2.3.0", - "lines-and-columns": "^1.1.6" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/parse-json/node_modules/json-parse-even-better-errors": { - "version": "2.3.1", - "resolved": 
"https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", - "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", - "dev": true, - "license": "MIT" - }, - "node_modules/parse5": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.1.2.tgz", - "integrity": "sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==", - "dev": true, - "license": "MIT", - "dependencies": { - "entities": "^4.4.0" - }, - "funding": { - "url": "https://github.com/inikulin/parse5?sponsor=1" + "funding": { + "url": "https://github.com/inikulin/parse5?sponsor=1" } }, "node_modules/path-exists": { @@ -13080,21 +12901,77 @@ } }, "node_modules/sigstore": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-2.3.1.tgz", - "integrity": "sha512-8G+/XDU8wNsJOQS5ysDVO0Etg9/2uA5gR9l4ZwijjlwxBcrU6RPfwi2+jJmbP+Ap1Hlp/nVAaEO4Fj22/SL2gQ==", - "dev": true, + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-3.0.0.tgz", + "integrity": "sha512-PHMifhh3EN4loMcHCz6l3v/luzgT3za+9f8subGgeMNjbJjzH4Ij/YoX3Gvu+kaouJRIlVdTHHCREADYf+ZteA==", + "inBundle": true, "license": "Apache-2.0", "dependencies": { - "@sigstore/bundle": "^2.3.2", - "@sigstore/core": "^1.0.0", + "@sigstore/bundle": "^3.0.0", + "@sigstore/core": "^2.0.0", "@sigstore/protobuf-specs": "^0.3.2", - "@sigstore/sign": "^2.3.2", - "@sigstore/tuf": "^2.3.4", - "@sigstore/verify": "^1.2.1" + "@sigstore/sign": "^3.0.0", + "@sigstore/tuf": "^3.0.0", + "@sigstore/verify": "^2.0.0" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/sigstore/node_modules/@sigstore/bundle": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-3.0.0.tgz", + "integrity": "sha512-XDUYX56iMPAn/cdgh/DTJxz5RWmqKV4pwvUAEKEWJl+HzKdCd/24wUa9JYNMlDSCb7SUHAdtksxYX779Nne/Zg==", + "inBundle": true, + "license": "Apache-2.0", + "dependencies": { + "@sigstore/protobuf-specs": "^0.3.2" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/sigstore/node_modules/@sigstore/core": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@sigstore/core/-/core-2.0.0.tgz", + "integrity": "sha512-nYxaSb/MtlSI+JWcwTHQxyNmWeWrUXJJ/G4liLrGG7+tS4vAz6LF3xRXqLH6wPIVUoZQel2Fs4ddLx4NCpiIYg==", + "inBundle": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/sigstore/node_modules/@sigstore/sign": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-3.0.0.tgz", + "integrity": "sha512-UjhDMQOkyDoktpXoc5YPJpJK6IooF2gayAr5LvXI4EL7O0vd58okgfRcxuaH+YTdhvb5aa1Q9f+WJ0c2sVuYIw==", + "inBundle": true, + "license": "Apache-2.0", + "dependencies": { + "@sigstore/bundle": "^3.0.0", + "@sigstore/core": "^2.0.0", + "@sigstore/protobuf-specs": "^0.3.2", + "make-fetch-happen": "^14.0.1", + "proc-log": "^5.0.0", + "promise-retry": "^2.0.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/sigstore/node_modules/@sigstore/verify": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@sigstore/verify/-/verify-2.0.0.tgz", + "integrity": "sha512-Ggtq2GsJuxFNUvQzLoXqRwS4ceRfLAJnrIHUDrzAD0GgnOhwujJkKkxM/s5Bako07c3WtAs/sZo5PJq7VHjeDg==", + "inBundle": true, + "license": "Apache-2.0", + "dependencies": { + "@sigstore/bundle": "^3.0.0", + "@sigstore/core": "^2.0.0", + 
"@sigstore/protobuf-specs": "^0.3.2" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" } }, "node_modules/smart-buffer": { @@ -16212,163 +16089,32 @@ } }, "node_modules/tuf-js": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-2.2.1.tgz", - "integrity": "sha512-GwIJau9XaA8nLVbUXsN3IlFi7WmQ48gBUrl3FTkkL/XLu/POhBzfmX9hd33FNMX1qAsfl6ozO1iMmW9NC8YniA==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-3.0.1.tgz", + "integrity": "sha512-+68OP1ZzSF84rTckf3FA95vJ1Zlx/uaXyiiKyPd1pA4rZNkpEvDAKmsu1xUSmbF/chCRYgZ6UZkDwC7PmzmAyA==", "inBundle": true, "license": "MIT", "dependencies": { - "@tufjs/models": "2.0.1", - "debug": "^4.3.4", - "make-fetch-happen": "^13.0.1" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/tuf-js/node_modules/@npmcli/agent": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-2.2.2.tgz", - "integrity": "sha512-OrcNPXdpSl9UX7qPVRWbmWMCSXrcDa2M9DvrbOTj7ao1S4PlqVFYv9/yLKMkrJKZ/V5A/kDBC690or307i26Og==", - "inBundle": true, - "license": "ISC", - "dependencies": { - "agent-base": "^7.1.0", - "http-proxy-agent": "^7.0.0", - "https-proxy-agent": "^7.0.1", - "lru-cache": "^10.0.1", - "socks-proxy-agent": "^8.0.3" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/tuf-js/node_modules/@npmcli/fs": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-3.1.1.tgz", - "integrity": "sha512-q9CRWjpHCMIh5sVyefoD1cA7PkvILqCZsnSOEUUivORLjxCO/Irmue2DprETiNgEqktDBZaM1Bi+jrarx1XdCg==", - "inBundle": true, - "license": "ISC", - "dependencies": { - "semver": "^7.3.5" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/tuf-js/node_modules/cacache": { - "version": "18.0.4", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-18.0.4.tgz", - "integrity": "sha512-B+L5iIa9mgcjLbliir2th36yEwPftrzteHYujzsx3dFP/31GCHcIeS8f5MGd80odLOjaOvSpU3EEAmRQptkxLQ==", - "inBundle": true, - "license": "ISC", - "dependencies": { - "@npmcli/fs": "^3.1.0", - "fs-minipass": "^3.0.0", - "glob": "^10.2.2", - "lru-cache": "^10.0.1", - "minipass": "^7.0.3", - "minipass-collect": "^2.0.1", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "p-map": "^4.0.0", - "ssri": "^10.0.0", - "tar": "^6.1.11", - "unique-filename": "^3.0.0" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/tuf-js/node_modules/make-fetch-happen": { - "version": "13.0.1", - "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-13.0.1.tgz", - "integrity": "sha512-cKTUFc/rbKUd/9meOvgrpJ2WrNzymt6jfRDdwg5UCnVzv9dTpEj9JS5m3wtziXVCjluIXyL8pcaukYqezIzZQA==", - "inBundle": true, - "license": "ISC", - "dependencies": { - "@npmcli/agent": "^2.0.0", - "cacache": "^18.0.0", - "http-cache-semantics": "^4.1.1", - "is-lambda": "^1.0.1", - "minipass": "^7.0.2", - "minipass-fetch": "^3.0.0", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "negotiator": "^0.6.3", - "proc-log": "^4.2.0", - "promise-retry": "^2.0.1", - "ssri": "^10.0.0" + "@tufjs/models": "3.0.1", + "debug": "^4.3.6", + "make-fetch-happen": "^14.0.1" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" } }, - "node_modules/tuf-js/node_modules/minipass-fetch": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-3.0.5.tgz", - "integrity": 
"sha512-2N8elDQAtSnFV0Dk7gt15KHsS0Fyz6CbYZ360h0WTYV1Ty46li3rAXVOQj1THMNLdmrD9Vt5pBPtWtVkpwGBqg==", + "node_modules/tuf-js/node_modules/@tufjs/models": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-3.0.1.tgz", + "integrity": "sha512-UUYHISyhCU3ZgN8yaear3cGATHb3SMuKHsQ/nVbHXcmnBf+LzQ/cQfhNG+rfaSHgqGKNEm2cOCLVLELStUQ1JA==", "inBundle": true, "license": "MIT", "dependencies": { - "minipass": "^7.0.3", - "minipass-sized": "^1.0.3", - "minizlib": "^2.1.2" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - }, - "optionalDependencies": { - "encoding": "^0.1.13" - } - }, - "node_modules/tuf-js/node_modules/proc-log": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz", - "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==", - "inBundle": true, - "license": "ISC", - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/tuf-js/node_modules/ssri": { - "version": "10.0.6", - "resolved": "https://registry.npmjs.org/ssri/-/ssri-10.0.6.tgz", - "integrity": "sha512-MGrFH9Z4NP9Iyhqn16sDtBpRRNJ0Y2hNa6D65h736fVSaPCHr4DM4sWUNvVaSuC+0OBGhwsrydQwmgfg5LncqQ==", - "inBundle": true, - "license": "ISC", - "dependencies": { - "minipass": "^7.0.3" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/tuf-js/node_modules/unique-filename": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-3.0.0.tgz", - "integrity": "sha512-afXhuC55wkAmZ0P18QsVE6kp8JaxrEokN2HGIoIVv2ijHQd419H0+6EigAFcIzXeMIkcIkNBpB3L/DXB3cTS/g==", - "inBundle": true, - "license": "ISC", - "dependencies": { - "unique-slug": "^4.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/tuf-js/node_modules/unique-slug": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-4.0.0.tgz", - "integrity": "sha512-WrcA6AyEfqDX5bWige/4NQfPZMtASNVxdmWR76WESYQVAACSgWcR6e9i0mofqqBxYFtL4oAxPIptY73/0YE1DQ==", - "inBundle": true, - "license": "ISC", - "dependencies": { - "imurmurhash": "^0.1.4" + "@tufjs/canonical-json": "2.0.0", + "minimatch": "^9.0.5" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" } }, "node_modules/tunnel": { @@ -17612,115 +17358,6 @@ "node": "^18.17.0 || >=20.5.0" } }, - "workspaces/libnpmpublish/node_modules/@sigstore/bundle": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-3.0.0.tgz", - "integrity": "sha512-XDUYX56iMPAn/cdgh/DTJxz5RWmqKV4pwvUAEKEWJl+HzKdCd/24wUa9JYNMlDSCb7SUHAdtksxYX779Nne/Zg==", - "license": "Apache-2.0", - "dependencies": { - "@sigstore/protobuf-specs": "^0.3.2" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "workspaces/libnpmpublish/node_modules/@sigstore/core": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@sigstore/core/-/core-2.0.0.tgz", - "integrity": "sha512-nYxaSb/MtlSI+JWcwTHQxyNmWeWrUXJJ/G4liLrGG7+tS4vAz6LF3xRXqLH6wPIVUoZQel2Fs4ddLx4NCpiIYg==", - "license": "Apache-2.0", - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "workspaces/libnpmpublish/node_modules/@sigstore/sign": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-3.0.0.tgz", - "integrity": "sha512-UjhDMQOkyDoktpXoc5YPJpJK6IooF2gayAr5LvXI4EL7O0vd58okgfRcxuaH+YTdhvb5aa1Q9f+WJ0c2sVuYIw==", - "license": "Apache-2.0", - "dependencies": { - 
"@sigstore/bundle": "^3.0.0", - "@sigstore/core": "^2.0.0", - "@sigstore/protobuf-specs": "^0.3.2", - "make-fetch-happen": "^14.0.1", - "proc-log": "^5.0.0", - "promise-retry": "^2.0.1" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "workspaces/libnpmpublish/node_modules/@sigstore/tuf": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-3.0.0.tgz", - "integrity": "sha512-9Xxy/8U5OFJu7s+OsHzI96IX/OzjF/zj0BSSaWhgJgTqtlBhQIV2xdrQI5qxLD7+CWWDepadnXAxzaZ3u9cvRw==", - "license": "Apache-2.0", - "dependencies": { - "@sigstore/protobuf-specs": "^0.3.2", - "tuf-js": "^3.0.1" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "workspaces/libnpmpublish/node_modules/@sigstore/verify": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@sigstore/verify/-/verify-2.0.0.tgz", - "integrity": "sha512-Ggtq2GsJuxFNUvQzLoXqRwS4ceRfLAJnrIHUDrzAD0GgnOhwujJkKkxM/s5Bako07c3WtAs/sZo5PJq7VHjeDg==", - "license": "Apache-2.0", - "dependencies": { - "@sigstore/bundle": "^3.0.0", - "@sigstore/core": "^2.0.0", - "@sigstore/protobuf-specs": "^0.3.2" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "workspaces/libnpmpublish/node_modules/@tufjs/models": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-3.0.1.tgz", - "integrity": "sha512-UUYHISyhCU3ZgN8yaear3cGATHb3SMuKHsQ/nVbHXcmnBf+LzQ/cQfhNG+rfaSHgqGKNEm2cOCLVLELStUQ1JA==", - "license": "MIT", - "dependencies": { - "@tufjs/canonical-json": "2.0.0", - "minimatch": "^9.0.5" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "workspaces/libnpmpublish/node_modules/sigstore": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-3.0.0.tgz", - "integrity": "sha512-PHMifhh3EN4loMcHCz6l3v/luzgT3za+9f8subGgeMNjbJjzH4Ij/YoX3Gvu+kaouJRIlVdTHHCREADYf+ZteA==", - "license": "Apache-2.0", - "dependencies": { - "@sigstore/bundle": "^3.0.0", - "@sigstore/core": "^2.0.0", - "@sigstore/protobuf-specs": "^0.3.2", - "@sigstore/sign": "^3.0.0", - "@sigstore/tuf": "^3.0.0", - "@sigstore/verify": "^2.0.0" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "workspaces/libnpmpublish/node_modules/tuf-js": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-3.0.1.tgz", - "integrity": "sha512-+68OP1ZzSF84rTckf3FA95vJ1Zlx/uaXyiiKyPd1pA4rZNkpEvDAKmsu1xUSmbF/chCRYgZ6UZkDwC7PmzmAyA==", - "license": "MIT", - "dependencies": { - "@tufjs/models": "3.0.1", - "debug": "^4.3.6", - "make-fetch-happen": "^14.0.1" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, "workspaces/libnpmsearch": { "version": "8.0.0", "license": "ISC", diff --git a/package.json b/package.json index c08c442655654..9b15cfd4e903e 100644 --- a/package.json +++ b/package.json @@ -60,7 +60,7 @@ "@npmcli/promise-spawn": "^8.0.2", "@npmcli/redact": "^3.0.0", "@npmcli/run-script": "^9.0.1", - "@sigstore/tuf": "^2.3.4", + "@sigstore/tuf": "^3.0.0", "abbrev": "^3.0.0", "archy": "~1.0.0", "cacache": "^19.0.1",