From 2b3ee8b9aaaae1390d3a3d82c4c0919e57ef461e Mon Sep 17 00:00:00 2001 From: Gar Date: Thu, 24 Oct 2024 10:28:04 -0700 Subject: [PATCH] fix!: remove old audit fallback request BREAKING CHANGE: npm will no longer fall back to the old audit endpoint if the bulk advisory request fails. This legacy code has a long tail in npm. Getting rid of it was difficult because of how load-bearing some of those requests were in tests. This PR removes the old "mock server" that arborist tests spun up, and moves that logic into the existing mock registry that the CLI uses. This lets us consolidate our test logic and shows more granularly which tests actually make registry requests. A few tests that only tested the fallback behavior were also removed. --- mock-registry/lib/index.js | 75 +- package-lock.json | 1 + workspaces/arborist/lib/audit-report.js | 102 +- workspaces/arborist/package.json | 3 +- .../arborist/build-ideal-tree.js.test.cjs | 1 + .../test/arborist/reify.js.test.cjs | 13 +- workspaces/arborist/test/arborist/audit.js | 49 +- .../test/arborist/build-ideal-tree.js | 1329 +++++++++-------- workspaces/arborist/test/arborist/deduper.js | 26 +- workspaces/arborist/test/arborist/pruner.js | 26 +- workspaces/arborist/test/arborist/rebuild.js | 39 +- workspaces/arborist/test/arborist/reify.js | 584 +++++--- workspaces/arborist/test/audit-report.js | 269 ++-- workspaces/arborist/test/fixtures/server.js | 250 ---- 14 files changed, 1333 insertions(+), 1434 deletions(-) delete mode 100644 workspaces/arborist/test/fixtures/server.js diff --git a/mock-registry/lib/index.js b/mock-registry/lib/index.js index e96c9503ca9d8..8fdb46902a373 100644 --- a/mock-registry/lib/index.js +++ b/mock-registry/lib/index.js @@ -1,9 +1,15 @@ -const pacote = require('pacote') const Arborist = require('@npmcli/arborist') -const npa = require('npm-package-arg') const Nock = require('nock') +const npa = require('npm-package-arg') +const pacote = require('pacote') +const path = require('node:path') const stringify = require('json-stringify-safe') +const { createReadStream } = require('node:fs') +const fs = require('node:fs/promises') + +const corgiDoc = 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*' + const logReq = (req, ...keys) => { const obj = JSON.parse(stringify(req)) const res = {} @@ -15,6 +21,27 @@ const logReq = (req, ...keys) => { return stringify(res, null, 2) } +// helper to convert old audit results to new bulk results +// TODO eventually convert the fixture files themselves +const auditToBulk = audit => { + const bulk = {} + for (const advisory in audit.advisories) { + const { + id, + url, + title, + severity = 'high', + /* eslint-disable-next-line camelcase */ + vulnerable_versions = '*', + module_name: name, + } = audit.advisories[advisory] + bulk[name] = bulk[name] || [] + /* eslint-disable-next-line camelcase */ + bulk[name].push({ id, url, title, severity, vulnerable_versions }) + } + return bulk +} + class MockRegistry { #tap #nock @@ -66,7 +93,6 @@ class MockRegistry { // find mistakes quicker instead of waiting for the entire test to end t.afterEach((t) => { t.strictSame(server.pendingMocks(), [], 'no pending mocks after each') - t.strictSame(server.activeMocks(), [], 'no active mocks after each') }) } @@ -74,6 +100,7 @@ class MockRegistry { Nock.enableNetConnect() server.done() Nock.emitter.off('no match', noMatch) + Nock.cleanAll() }) return server @@ -453,6 +480,48 @@ class MockRegistry { } } + // bulk advisory audit endpoint + audit ({ responseCode
= 200, results = {}, convert = false, times = 1 } = {}) { + this.nock = this.nock + .post(this.fullPath('/-/npm/v1/security/advisories/bulk')) + .times(times) + .reply( + responseCode, + convert ? auditToBulk(results) : results + ) + } + + // Used in Arborist to mock the registry from fixture data on disk + // Will eat up all GET requests to the entire registry, so it probably doesn't work with the other GET routes very well. + mocks ({ dir }) { + const exists = (p) => fs.stat(p).then((s) => s).catch(() => false) + this.nock = this.nock.get(/.*/).reply(async function () { + const { headers, path: url } = this.req + const isCorgi = headers.accept.includes('application/vnd.npm.install-v1+json') + const encodedUrl = url.replace(/@/g, '').replace(/%2f/gi, '/') + const f = path.join(dir, 'registry-mocks', 'content', encodedUrl) + let file = f + let contentType = 'application/octet-stream' + if (isCorgi && await exists(`${f}.min.json`)) { + file = `${f}.min.json` + contentType = corgiDoc + } else if (await exists(`${f}.json`)) { + file = `${f}.json` + contentType = 'application/json' + } else if (await exists(`${f}/index.json`)) { + file = `${f}/index.json` + contentType = 'application/json' + } + const stats = await exists(file) + if (stats) { + const body = createReadStream(file) + body.pause() + return [200, body, { 'content-type': contentType, 'content-length': stats.size }] + } + return [404, { error: 'not found' }] + }).persist() + } + /** * this is a simpler convenience method for creating mockable registry with * tarballs for specific versions diff --git a/package-lock.json b/package-lock.json index 7e54d59dfb508..848a93eac50f2 100644 --- a/package-lock.json +++ b/package-lock.json @@ -16922,6 +16922,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^5.0.1", + "@npmcli/mock-registry": "^1.0.0", "@npmcli/template-oss": "4.23.3", "benchmark": "^2.1.4", "minify-registry-metadata": "^4.0.0", diff --git a/workspaces/arborist/lib/audit-report.js b/workspaces/arborist/lib/audit-report.js index f7700ce9119de..836c2ed5a20ea 100644 --- a/workspaces/arborist/lib/audit-report.js +++ b/workspaces/arborist/lib/audit-report.js @@ -274,33 +274,6 @@ class AuditReport extends Map { throw new Error('do not call AuditReport.set() directly') } - // convert a quick-audit into a bulk advisory listing - static auditToBulk (report) { - if (!report.advisories) { - // tack on the report json where the response body would go - throw Object.assign(new Error('Invalid advisory report'), { - body: JSON.stringify(report), - }) - } - - const bulk = {} - const { advisories } = report - for (const advisory of Object.values(advisories)) { - const { - id, - url, - title, - severity = 'high', - vulnerable_versions = '*', - module_name: name, - } = advisory - bulk[name] = bulk[name] || [] - bulk[name].push({ id, url, title, severity, vulnerable_versions }) - } - - return bulk - } - async [_getReport] () { // if we're not auditing, just return false if (this.options.audit === false || this.options.offline === true || this.tree.inventory.size === 1) { @@ -309,39 +282,24 @@ const timeEnd = time.start('auditReport:getReport') try { - try { - // first try the super fast bulk advisory listing - const body = prepareBulkData(this.tree, this[_omit], this.filterSet) - log.silly('audit', 'bulk request', body) - - // no sense asking if we don't have anything to audit, - // we know it'll be empty - if (!Object.keys(body).length) { - return null - } + const body = prepareBulkData(this.tree, this[_omit],
this.filterSet) + log.silly('audit', 'bulk request', body) - const res = await fetch('/-/npm/v1/security/advisories/bulk', { - ...this.options, - registry: this.options.auditRegistry || this.options.registry, - method: 'POST', - gzip: true, - body, - }) - - return await res.json() - } catch (er) { - log.silly('audit', 'bulk request failed', String(er.body)) - // that failed, try the quick audit endpoint - const body = prepareData(this.tree, this.options) - const res = await fetch('/-/npm/v1/security/audits/quick', { - ...this.options, - registry: this.options.auditRegistry || this.options.registry, - method: 'POST', - gzip: true, - body, - }) - return AuditReport.auditToBulk(await res.json()) + // no sense asking if we don't have anything to audit, + // we know it'll be empty + if (!Object.keys(body).length) { + return null } + + const res = await fetch('/-/npm/v1/security/advisories/bulk', { + ...this.options, + registry: this.options.auditRegistry || this.options.registry, + method: 'POST', + gzip: true, + body, + }) + + return await res.json() } catch (er) { log.verbose('audit error', er) log.silly('audit error', String(er.body)) @@ -384,32 +342,4 @@ const prepareBulkData = (tree, omit, filterSet) => { return payload } -const prepareData = (tree, opts) => { - const { npmVersion: npm_version } = opts - const node_version = process.version - const { platform, arch } = process - const { NODE_ENV: node_env } = process.env - const data = tree.meta.commit() - // the legacy audit endpoint doesn't support any kind of pre-filtering - // we just have to get the advisories and skip over them in the report - return { - name: data.name, - version: data.version, - requires: { - ...(tree.package.devDependencies || {}), - ...(tree.package.peerDependencies || {}), - ...(tree.package.optionalDependencies || {}), - ...(tree.package.dependencies || {}), - }, - dependencies: data.dependencies, - metadata: { - node_version, - npm_version, - platform, - arch, - node_env, - }, - } -} - module.exports = AuditReport diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json index 52add84af1496..946458d761498 100644 --- a/workspaces/arborist/package.json +++ b/workspaces/arborist/package.json @@ -41,6 +41,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^5.0.1", + "@npmcli/mock-registry": "^1.0.0", "@npmcli/template-oss": "4.23.3", "benchmark": "^2.1.4", "minify-registry-metadata": "^4.0.0", @@ -82,7 +83,7 @@ "test-env": [ "LC_ALL=sk" ], - "timeout": "360", + "timeout": "720", "nyc-arg": [ "--exclude", "tap-snapshots/**" diff --git a/workspaces/arborist/tap-snapshots/test/arborist/build-ideal-tree.js.test.cjs b/workspaces/arborist/tap-snapshots/test/arborist/build-ideal-tree.js.test.cjs index de205053a2cd4..8b37abd84e1f6 100644 --- a/workspaces/arborist/tap-snapshots/test/arborist/build-ideal-tree.js.test.cjs +++ b/workspaces/arborist/tap-snapshots/test/arborist/build-ideal-tree.js.test.cjs @@ -159887,6 +159887,7 @@ ArboristNode { "location": "node_modules/foo", "name": "foo", "path": "{CWD}/test/arborist/tap-testdir-build-ideal-tree-workspaces-should-allow-cyclic-peer-dependencies-between-workspaces-and-packages-from-a-repository/node_modules/foo", + "resolved": "https://registry.npmjs.org/foo/-/foo-1.0.0.tgz", "version": "1.0.0", }, "workspace-a" => ArboristLink { diff --git a/workspaces/arborist/tap-snapshots/test/arborist/reify.js.test.cjs b/workspaces/arborist/tap-snapshots/test/arborist/reify.js.test.cjs index ffe63992d8c34..cec3560033241 100644 --- 
a/workspaces/arborist/tap-snapshots/test/arborist/reify.js.test.cjs +++ b/workspaces/arborist/tap-snapshots/test/arborist/reify.js.test.cjs @@ -169,7 +169,7 @@ exports[`test/arborist/reify.js TAP add a dep present in the tree, with v1 shrin {"dependencies":{"once":"^1.4.0","wrappy":"^1.0.2"}} ` -exports[`test/arborist/reify.js TAP add a new pkg to a prefix that needs to be mkdirpd > should output a successful tree in mkdirp folder 1`] = ` +exports[`test/arborist/reify.js TAP add a new pkg to a prefix that needs to be mkdirpd not dry run > should output a successful tree in mkdirp folder 1`] = ` ArboristNode { "children": Map { "abbrev" => ArboristNode { @@ -183,7 +183,7 @@ ArboristNode { }, "location": "node_modules/abbrev", "name": "abbrev", - "path": "{CWD}/test/arborist/tap-testdir-reify-add-a-new-pkg-to-a-prefix-that-needs-to-be-mkdirpd/missing/path/to/root/node_modules/abbrev", + "path": "{CWD}/test/arborist/tap-testdir-reify-add-a-new-pkg-to-a-prefix-that-needs-to-be-mkdirpd-not-dry-run/missing/path/to/root/node_modules/abbrev", "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", "version": "1.1.1", }, @@ -199,11 +199,11 @@ ArboristNode { "isProjectRoot": true, "location": "", "name": "root", - "path": "{CWD}/test/arborist/tap-testdir-reify-add-a-new-pkg-to-a-prefix-that-needs-to-be-mkdirpd/missing/path/to/root", + "path": "{CWD}/test/arborist/tap-testdir-reify-add-a-new-pkg-to-a-prefix-that-needs-to-be-mkdirpd-not-dry-run/missing/path/to/root", } ` -exports[`test/arborist/reify.js TAP add a new pkg to a prefix that needs to be mkdirpd > should place expected lockfile file into place 1`] = ` +exports[`test/arborist/reify.js TAP add a new pkg to a prefix that needs to be mkdirpd not dry run > should place expected lockfile file into place 1`] = ` { "name": "root", "lockfileVersion": 3, @@ -225,7 +225,7 @@ exports[`test/arborist/reify.js TAP add a new pkg to a prefix that needs to be m ` -exports[`test/arborist/reify.js TAP add a new pkg to a prefix that needs to be mkdirpd > should place expected package.json file into place 1`] = ` +exports[`test/arborist/reify.js TAP add a new pkg to a prefix that needs to be mkdirpd not dry run > should place expected package.json file into place 1`] = ` { "dependencies": { "abbrev": "^1.1.1" @@ -17900,6 +17900,7 @@ Object { "ruy": "bin/index.js", }, "integrity": "sha512-VYppDTCM6INWUMKlWiKws4nVMuCNU5h+xjF6lj/0y90rLq017/m8aEpNy4zQSZFV2qz66U/hRZwwlSLJ5l5JMQ==", + "license": "ISC", "resolved": "https://registry.npmjs.org/ruy/-/ruy-1.0.0.tgz", "version": "1.0.0", }, @@ -33046,6 +33047,7 @@ exports[`test/arborist/reify.js TAP save proper lockfile with bins when upgradin "version": "7.3.2", "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.2.tgz", "integrity": "sha512-OrOb32TeeambH6UrhtShmF7CRDqhL6/5XpPNp2DuRH6+9QLw/orhp72j87v8Qa1ScDkvrrBNpZcDejAirJmfXQ==", + "license": "ISC", "bin": { "semver": "bin/semver.js" }, @@ -33073,6 +33075,7 @@ exports[`test/arborist/reify.js TAP save proper lockfile with bins when upgradin "version": "7.3.2", "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.2.tgz", "integrity": "sha512-OrOb32TeeambH6UrhtShmF7CRDqhL6/5XpPNp2DuRH6+9QLw/orhp72j87v8Qa1ScDkvrrBNpZcDejAirJmfXQ==", + "license": "ISC", "bin": { "semver": "bin/semver.js" }, diff --git a/workspaces/arborist/test/arborist/audit.js b/workspaces/arborist/test/arborist/audit.js index 11205899057a3..aeb9ec42dd32b 100644 --- a/workspaces/arborist/test/arborist/audit.js +++ b/workspaces/arborist/test/arborist/audit.js @@ -1,26 +1,32 @@ 
const t = require('tap') -const { resolve } = require('node:path') +const { join, resolve } = require('node:path') const Arborist = require('../../lib/arborist/index.js') const { normalizePath, printTree } = require('../fixtures/utils.js') -const { auditResponse, advisoryBulkResponse, ...mockRegistry } = require('../fixtures/server.js') +const MockRegistry = require('@npmcli/mock-registry') const fixtures = resolve(__dirname, '../fixtures') const fixture = (t, p) => require(fixtures + '/reify-cases/' + p)(t) -t.before(mockRegistry.start) -t.teardown(mockRegistry.stop) - const cache = t.testdir() -const newArb = (path, options = {}) => - new Arborist({ path, cache, registry: mockRegistry.registry, ...options }) +const newArb = (path, options = {}) => new Arborist({ path, cache, ...options }) const cwd = normalizePath(process.cwd()) t.cleanSnapshot = s => s.split(cwd).join('{CWD}') - .split(mockRegistry.registry).join('https://registry.npmjs.org/') + +const createRegistry = (t) => { + const registry = new MockRegistry({ + strict: true, + tap: t, + registry: 'https://registry.npmjs.org', + }) + return registry +} t.test('audit finds the bad deps', async t => { const path = resolve(fixtures, 'deprecated-dep') - t.teardown(auditResponse(resolve(fixtures, 'audit-nyc-mkdirp/audit.json'))) + const registry = createRegistry(t, false) + registry.audit({ convert: true, results: require(resolve(fixtures, 'audit-nyc-mkdirp', 'audit.json')) }) + registry.mocks({ dir: join(__dirname, '..', 'fixtures') }) const arb = newArb(path) const report = await arb.audit() t.equal(report.topVulns.size, 0) @@ -29,7 +35,9 @@ t.test('audit finds the bad deps', async t => { t.test('no package lock finds no bad deps', async t => { const path = resolve(fixtures, 'deprecated-dep') - t.teardown(auditResponse(resolve(fixtures, 'audit-nyc-mkdirp/audit.json'))) + const registry = createRegistry(t, false) + registry.audit({ convert: true, results: require(resolve(fixtures, 'audit-nyc-mkdirp', 'audit.json')) }) + registry.mocks({ dir: join(__dirname, '..', 'fixtures') }) const arb = newArb(path, { packageLock: false }) const report = await arb.audit() t.equal(report.topVulns.size, 0) @@ -38,22 +46,26 @@ t.test('no package lock finds no bad deps', async t => { t.test('audit fix reifies out the bad deps', async t => { const path = fixture(t, 'deprecated-dep') - t.teardown(auditResponse(resolve(fixtures, 'audit-nyc-mkdirp/audit.json'))) + const registry = createRegistry(t, false) + registry.audit({ convert: true, results: require(resolve(fixtures, 'audit-nyc-mkdirp', 'audit.json')) }) + registry.mocks({ dir: join(__dirname, '..', 'fixtures') }) const arb = newArb(path) const tree = printTree(await arb.audit({ fix: true })) t.matchSnapshot(tree, 'reified out the bad mkdirp and minimist') }) -t.test('audit does not do globals', t => - t.rejects(newArb('.', { global: true }).audit(), { +t.test('audit does not do globals', async t => { + await t.rejects(newArb('.', { global: true }).audit(), { message: '`npm audit` does not support testing globals', code: 'EAUDITGLOBAL', - })) + }) +}) t.test('audit in a workspace', async t => { const src = resolve(fixtures, 'audit-nyc-mkdirp') - const auditFile = resolve(src, 'advisory-bulk.json') - t.teardown(advisoryBulkResponse(auditFile)) + const registry = createRegistry(t) + registry.audit({ results: require(resolve(src, 'advisory-bulk.json')) }) + registry.mocks({ dir: join(__dirname, '..', 'fixtures') }) const path = t.testdir({ 'package.json': JSON.stringify({ @@ -104,8 +116,9 @@ 
t.test('audit in a workspace', async t => { t.test('audit with workspaces disabled', async t => { const src = resolve(fixtures, 'audit-nyc-mkdirp') - const auditFile = resolve(src, 'advisory-bulk.json') - t.teardown(advisoryBulkResponse(auditFile)) + const registry = createRegistry(t) + registry.audit({ results: require(resolve(src, 'advisory-bulk.json')) }) + registry.mocks({ dir: join(__dirname, '..', 'fixtures') }) const path = t.testdir({ 'package.json': JSON.stringify({ diff --git a/workspaces/arborist/test/arborist/build-ideal-tree.js b/workspaces/arborist/test/arborist/build-ideal-tree.js index 2972a00b5580e..7adfb3fb35d96 100644 --- a/workspaces/arborist/test/arborist/build-ideal-tree.js +++ b/workspaces/arborist/test/arborist/build-ideal-tree.js @@ -4,34 +4,38 @@ if (process.platform === 'win32') { process.env.ARBORIST_DEBUG = 0 } -const { basename, resolve, relative } = require('node:path') +const { join, basename, resolve, relative } = require('node:path') const pacote = require('pacote') const t = require('tap') const Arborist = require('../..') const fixtures = resolve(__dirname, '../fixtures') // load the symbolic links that we depend on require(fixtures) -const { start, stop, registry, auditResponse } = require('../fixtures/server.js') const npa = require('npm-package-arg') const fs = require('node:fs') -const nock = require('nock') -const semver = require('semver') +const MockRegistry = require('@npmcli/mock-registry') -t.before(start) -t.teardown(stop) - -const cache = t.testdir() +const createRegistry = (t, mocks = false) => { + const registry = new MockRegistry({ + strict: true, + tap: t, + registry: 'https://registry.npmjs.org', + }) + if (mocks) { + registry.mocks({ dir: join(__dirname, '..', 'fixtures') }) + } + return registry +} -// track the warnings that are emitted. returns a function that removes -// the listener and provides the list of what it saw. -const warningTracker = () => { - const list = [] - const onlog = (...msg) => msg[0] === 'warn' && list.push(msg) +// track the warnings that are emitted. returns the list of what it saw +const warningTracker = (t) => { + const warnings = [] + const onlog = (...msg) => msg[0] === 'warn' && warnings.push(msg) process.on('log', onlog) - return () => { + t.teardown(() => { process.removeListener('log', onlog) - return list - } + }) + return warnings } const { @@ -53,83 +57,29 @@ This is a one-time fix-up, please be patient... const cwd = normalizePath(process.cwd()) t.cleanSnapshot = s => s.split(cwd).join('{CWD}') - .split(registry).join('https://registry.npmjs.org/') - -const printIdeal = (path, opt) => buildIdeal(path, opt).then(printTree) +const cache = t.testdir() // give it a very long timeout so CI doesn't crash as easily -const OPT = { cache, registry, timeout: 30 * 60 * 1000 } - -const newArb = (path, opt = {}) => new Arborist({ ...OPT, path, ...opt }) +const newArb = (path, opt = {}) => new Arborist({ timeout: 30 * 60 * 1000, path, cache, ...opt }) const buildIdeal = (path, opt) => newArb(path, opt).buildIdealTree(opt) - -const generateNocks = (t, spec) => { - nock.disableNetConnect() - - const getDeps = (version, deps) => - (deps || []).reduce((result, dep) => { - if (typeof dep === 'string') { - return { - ...result, - [dep]: version, - } - } else { - return { - ...result, - ...(version in dep ? 
{ [dep[version]]: version } : {}), - } - } - }, {}) - - for (const name in spec) { - const pkg = spec[name] - - const packument = { - name, - 'dist-tags': { - latest: pkg.latest || semver.maxSatisfying(pkg.versions, '*'), - }, - versions: pkg.versions.reduce((versions, version) => { - return { - ...versions, - [version]: { - name, - version, - dependencies: getDeps(version, pkg.dependencies), - peerDependencies: getDeps(version, pkg.peerDependencies), - }, - } - }, {}), - } - - nock(registry) - .persist() - .get(`/${name}`) - .reply(200, packument) - } - - t.teardown(async () => { - nock.enableNetConnect() - nock.cleanAll() - }) -} +const printIdeal = (path, opt) => buildIdeal(path, opt).then(printTree) t.test('fail on mismatched engine when engineStrict is set', async t => { const path = resolve(fixtures, 'engine-specification') - t.rejects(buildIdeal(path, { - ...OPT, + await t.rejects(buildIdeal(path, { nodeVersion: '12.18.4', engineStrict: true, - }).then(() => { - throw new Error('failed to fail') - }), { code: 'EBADENGINE' }) + }), + { code: 'EBADENGINE' }, + 'should fail with EBADENGINE error' + ) }) -t.test('fail on malformed package.json', t => { +t.test('fail on malformed package.json', async t => { const path = resolve(fixtures, 'malformed-json') - return t.rejects( + await t.rejects( buildIdeal(path), { code: 'EJSONPARSE' }, 'should fail with EJSONPARSE error' @@ -139,7 +89,6 @@ t.test('fail on malformed package.json', t => { t.test('ignore mismatched engine for optional dependencies', async () => { const path = resolve(fixtures, 'optional-engine-specification') await buildIdeal(path, { - ...OPT, nodeVersion: '12.18.4', engineStrict: true, }) @@ -147,52 +96,51 @@ t.test('ignore mismatched engine for optional dependencies', async () => { t.test('warn on mismatched engine when engineStrict is false', t => { const path = resolve(fixtures, 'engine-specification') - const check = warningTracker() + createRegistry(t, false) + const warnings = warningTracker(t) return buildIdeal(path, { - ...OPT, nodeVersion: '12.18.4', engineStrict: false, - }).then(() => t.match(check(), [ + }).then(() => t.match(warnings, [ ['warn', 'EBADENGINE'], ])) }) t.test('fail on mismatched platform', async t => { const path = resolve(fixtures, 'platform-specification') - t.rejects(buildIdeal(path, { - ...OPT, + createRegistry(t, true) + await t.rejects(buildIdeal(path, { nodeVersion: '4.0.0', - }).then(() => { - throw new Error('failed to fail') }), { code: 'EBADPLATFORM' }) }) t.test('ignore mismatched platform for optional dependencies', async t => { const path = resolve(fixtures, 'optional-platform-specification') + createRegistry(t, true) const tree = await buildIdeal(path, { - ...OPT, nodeVersion: '12.18.4', engineStrict: true, }) t.equal(tree.children.get('platform-specifying-test-package').package.version, '1.0.0', 'added the optional dep to the ideal tree') }) -t.test('no options', t => { +t.test('no options', async t => { const arb = new Arborist() t.match( arb.registry, 'https://registry.npmjs.org', 'should use default registry' ) - t.end() }) t.test('a workspace with a conflicted nested duplicated dep', async t => { + createRegistry(t, true) t.matchSnapshot(await printIdeal(resolve(fixtures, 'workspace4'))) }) t.test('a tree with an outdated dep, missing dep, no lockfile', async t => { const path = resolve(fixtures, 'outdated-no-lockfile') + createRegistry(t, true) const tree = await buildIdeal(path) const expected = { once: '1.3.3', @@ -205,14 +153,17 @@ t.test('a tree with an outdated dep, 
missing dep, no lockfile', async t => { t.matchSnapshot(printTree(tree), 'should not update all') }) -t.test('tarball deps with transitive tarball deps', t => - t.resolveMatchSnapshot(printIdeal( - resolve(fixtures, 'tarball-dependencies')))) +t.test('tarball deps with transitive tarball deps', async t => { + createRegistry(t, false) + await t.resolveMatchSnapshot(printIdeal( + resolve(fixtures, 'tarball-dependencies'))) +}) t.test('testing-peer-deps-overlap package', async t => { const path = resolve(fixtures, 'testing-peer-deps-overlap') + createRegistry(t, true) const idealTree = await buildIdeal(path) - const arb = new Arborist({ path, idealTree, ...OPT }) + const arb = newArb(path, { idealTree }) const tree2 = await arb.buildIdealTree() t.equal(tree2, idealTree) t.matchSnapshot(printTree(idealTree), 'build ideal tree with overlapping peer dep ranges') @@ -220,89 +171,98 @@ t.test('testing-peer-deps-overlap package', async t => { t.test('testing-peer-deps package', async t => { const path = resolve(fixtures, 'testing-peer-deps') + createRegistry(t, true) const idealTree = await buildIdeal(path) - const arb = new Arborist({ path, idealTree, ...OPT }) + const arb = newArb(path, { idealTree }) const tree2 = await arb.buildIdealTree() t.equal(tree2, idealTree) t.matchSnapshot(printTree(idealTree), 'build ideal tree with peer deps') }) -t.test('testing-peer-deps package with symlinked root', t => { +t.test('testing-peer-deps package with symlinked root', async t => { const path = resolve(fixtures, 'testing-peer-deps-link') - return buildIdeal(path).then(idealTree => { - t.ok(idealTree.isLink, 'ideal tree is rooted on a Link') - return new Arborist({ path, idealTree, ...OPT }) - .buildIdealTree().then(tree2 => t.equal(tree2, idealTree)) - .then(() => t.matchSnapshot(printTree(idealTree), 'build ideal tree with peer deps')) - }) + createRegistry(t, true) + const idealTree = await buildIdeal(path) + t.ok(idealTree.isLink, 'ideal tree is rooted on a Link') + const arb = newArb(path, { idealTree }) + const tree2 = await arb.buildIdealTree() + t.equal(tree2, idealTree) + t.matchSnapshot(printTree(idealTree), 'build ideal tree with peer deps') }) -t.test('testing-peer-deps nested', t => { +t.test('testing-peer-deps nested', async t => { const path = resolve(fixtures, 'testing-peer-deps-nested') - return t.resolveMatchSnapshot(printIdeal(path), 'build ideal tree') - .then(() => t.resolveMatchSnapshot(printIdeal(path, { - // hit the branch where update is just a list of names - update: ['@isaacs/testing-peer-deps'], - }), 'can update a peer dep cycle')) + createRegistry(t, true) + await t.resolveMatchSnapshot(printIdeal(path), 'build ideal tree') + await t.resolveMatchSnapshot(printIdeal(path, { + // hit the branch where update is just a list of names + update: ['@isaacs/testing-peer-deps'], + }), 'can update a peer dep cycle') }) -t.test('tap vs react15', t => { +t.test('tap vs react15', async t => { const path = resolve(fixtures, 'tap-react15-collision') - return t.resolveMatchSnapshot(printIdeal(path), 'build ideal tree with tap collision') + createRegistry(t, true) + await t.resolveMatchSnapshot(printIdeal(path), 'build ideal tree with tap collision') }) -t.test('tap vs react15 with legacy shrinkwrap', t => { +t.test('tap vs react15 with legacy shrinkwrap', async t => { const path = resolve(fixtures, 'tap-react15-collision-legacy-sw') - return t.resolveMatchSnapshot(printIdeal(path), 'tap collision with legacy sw file') + createRegistry(t, true) + await t.resolveMatchSnapshot(printIdeal(path), 
'tap collision with legacy sw file') }) -t.test('bad shrinkwrap file', t => { +t.test('bad shrinkwrap file', async t => { const path = resolve(fixtures, 'testing-peer-deps-bad-sw') - return t.resolveMatchSnapshot(printIdeal(path), 'bad shrinkwrap') + createRegistry(t, true) + await t.resolveMatchSnapshot(printIdeal(path), 'bad shrinkwrap') }) -t.test('a direct link dep has a dep with optional dependencies', t => { +t.test('a direct link dep has a dep with optional dependencies', async t => { const path = resolve(fixtures, 'link-dep-has-dep-with-optional-dep') - return t.resolveMatchSnapshot(printIdeal(path), 'should not mark children of the optional dep as extraneous') + createRegistry(t, true) + await t.resolveMatchSnapshot(printIdeal(path), 'should not mark children of the optional dep as extraneous') }) -t.test('cyclical peer deps', t => { +t.test('cyclical peer deps', async t => { const paths = [ resolve(fixtures, 'peer-dep-cycle'), resolve(fixtures, 'peer-dep-cycle-with-sw'), ] - - t.plan(paths.length) - paths.forEach(path => t.test(basename(path), t => - t.resolveMatchSnapshot(printIdeal(path), 'cyclical peer deps') - .then(() => t.resolveMatchSnapshot(printIdeal(path, { + createRegistry(t, true) + for (const path of paths) { + await t.test(basename(path), async t => { + await t.resolveMatchSnapshot(printIdeal(path), 'cyclical peer deps') + await t.resolveMatchSnapshot(printIdeal(path, { // just reload the dep at its current required version add: ['@isaacs/peer-dep-cycle-a'], - }), 'cyclical peer deps - reload a dependency')) - .then(() => t.resolveMatchSnapshot(printIdeal(path, { + }), 'cyclical peer deps - reload a dependency') + await t.resolveMatchSnapshot(printIdeal(path, { add: ['@isaacs/peer-dep-cycle-a@2.x'], - }), 'cyclical peer deps - upgrade a package')) - .then(() => t.rejects(printIdeal(path, { + }), 'cyclical peer deps - upgrade a package') + await t.rejects(printIdeal(path, { // this conflicts with the direct dep on a@1 PEER-> b@1 add: ['@isaacs/peer-dep-cycle-b@2.x'], - }))) - // this conflict is ok since we're using legacy peer deps - .then(() => t.resolveMatchSnapshot(printIdeal(path, { + })) + // this conflict is ok since we're using legacy peer deps + await t.resolveMatchSnapshot(printIdeal(path, { add: ['@isaacs/peer-dep-cycle-b@2.x'], legacyPeerDeps: true, - }), 'add b@2.x with legacy peer deps')) - .then(() => t.resolveMatchSnapshot(printIdeal(path, { + }), 'add b@2.x with legacy peer deps') + await t.resolveMatchSnapshot(printIdeal(path, { // use @latest rather than @2.x to exercise the 'explicit tag' path add: ['@isaacs/peer-dep-cycle-b@latest'], rm: ['@isaacs/peer-dep-cycle-a'], - }), 'can add b@2 if we remove a@1 dep')) - .then(() => t.resolveMatchSnapshot(printIdeal(path, { + }), 'can add b@2 if we remove a@1 dep') + await t.resolveMatchSnapshot(printIdeal(path, { rm: ['@isaacs/peer-dep-cycle-a'], - }), 'remove the dep, prune everything')) - )) + }), 'remove the dep, prune everything') + }) + } }) -t.test('nested cyclical peer deps', t => { +t.test('nested cyclical peer deps', async t => { + const registry = createRegistry(t, true) const paths = [ resolve(fixtures, 'peer-dep-cycle-nested'), resolve(fixtures, 'peer-dep-cycle-nested-with-sw'), @@ -320,96 +280,100 @@ t.test('nested cyclical peer deps', t => { }, } - t.plan(paths.length) - paths.forEach(path => t.test(basename(path), async t => { - t.matchSnapshot(await printIdeal(path), 'nested peer deps cycle') - - t.matchSnapshot(await printIdeal(path, { - // just make sure it works if it gets a spec object 
- add: [npa('@isaacs/peer-dep-cycle-a@2.x')], - }), 'upgrade a') - - t.matchSnapshot(await printIdeal(path, { - // a dep whose name we don't yet know - add: [ - '@isaacs/peer-dep-cycle-a@2.x', - `${registry}@isaacs/peer-dep-cycle-b/-/peer-dep-cycle-b-2.0.0.tgz`, - ], - }), 'upgrade b') - - t.matchSnapshot(await printIdeal(path, { - force: true, - add: ['@isaacs/peer-dep-cycle-c@2.x'], - }), 'upgrade c, forcibly') - - await t.rejects(printIdeal(path, { - add: [ - '@isaacs/peer-dep-cycle-a@1.x', - '@isaacs/peer-dep-cycle-c@2.x', - ], - }), ers[path], 'try (and fail) to upgrade c and a incompatibly') - })) + for (const path of paths) { + await t.test(basename(path), async t => { + await t.resolveMatchSnapshot(printIdeal(path), 'nested peer deps cycle') + await t.resolveMatchSnapshot(printIdeal(path, { + // just make sure it works if it gets a spec object + add: [npa('@isaacs/peer-dep-cycle-a@2.x')], + }), 'upgrade a') + await t.resolveMatchSnapshot(printIdeal(path, { + // a dep whose name we don't yet know + add: [ + '@isaacs/peer-dep-cycle-a@2.x', + `${registry.origin}/@isaacs/peer-dep-cycle-b/-/peer-dep-cycle-b-2.0.0.tgz`, + ], + }), 'upgrade b') + await t.resolveMatchSnapshot(printIdeal(path, { + force: true, + add: ['@isaacs/peer-dep-cycle-c@2.x'], + }), 'upgrade c, forcibly') + await t.rejects(printIdeal(path, { + add: [ + '@isaacs/peer-dep-cycle-a@1.x', + '@isaacs/peer-dep-cycle-c@2.x', + ], + }), ers[path], 'try (and fail) to upgrade c and a incompatibly') + }) + } }) -t.test('dedupe example - not deduped', t => { +t.test('dedupe example - not deduped', async t => { + createRegistry(t, true) const path = resolve(fixtures, 'dedupe-tests') - return t.resolveMatchSnapshot(printIdeal(path), 'dedupe testing') + await t.resolveMatchSnapshot(printIdeal(path), 'dedupe testing') }) -t.test('dedupe example - deduped because preferDedupe=true', t => { +t.test('dedupe example - deduped because preferDedupe=true', async t => { + createRegistry(t, true) const path = resolve(fixtures, 'dedupe-tests') - return t.resolveMatchSnapshot(printIdeal(path, { preferDedupe: true })) + await t.resolveMatchSnapshot(printIdeal(path, { preferDedupe: true })) }) -t.test('dedupe example - nested because legacyBundling=true', t => { +t.test('dedupe example - nested because legacyBundling=true', async t => { const path = resolve(fixtures, 'dedupe-tests') - return t.resolveMatchSnapshot(printIdeal(path, { + createRegistry(t, true) + await t.resolveMatchSnapshot(printIdeal(path, { installStrategy: 'nested', preferDedupe: true, })) }) -t.test('dedupe example - deduped', t => { +t.test('dedupe example - deduped', async t => { const path = resolve(fixtures, 'dedupe-tests-2') - return t.resolveMatchSnapshot(printIdeal(path), 'dedupe testing') + createRegistry(t, true) + await t.resolveMatchSnapshot(printIdeal(path), 'dedupe testing') }) t.test('expose explicitRequest', async t => { const path = resolve(fixtures, 'simple') - const arb = new Arborist({ ...OPT, path }) + createRegistry(t, true) + const arb = newArb(path) await arb.buildIdealTree({ add: ['abbrev'] }) t.match(arb.explicitRequests, Set, 'exposes the explicit request Set') t.strictSame([...arb.explicitRequests].map(e => e.name), ['abbrev']) - t.end() }) -t.test('bundle deps example 1, empty', t => { +t.test('bundle deps example 1, empty', async t => { // NB: this results in ignoring the bundled deps when building the // ideal tree. When we reify, we'll have to ignore the deps that // got placed as part of the bundle. 
const path = resolve(fixtures, 'testing-bundledeps-empty') - return t.resolveMatchSnapshot(printIdeal(path), 'bundle deps testing') - .then(() => t.resolveMatchSnapshot(printIdeal(path, { - saveBundle: true, - add: ['@isaacs/testing-bundledeps'], - }), 'should have some missing deps in the ideal tree')) + createRegistry(t, true) + await t.resolveMatchSnapshot(printIdeal(path), 'bundle deps testing') + await t.resolveMatchSnapshot(printIdeal(path, { + saveBundle: true, + add: ['@isaacs/testing-bundledeps'], + }), 'should have some missing deps in the ideal tree') }) -t.test('bundle deps example 1, full', t => { +t.test('bundle deps example 1, full', async t => { // In this test, bundle deps show up, because they're present in // the actual tree to begin with. const path = resolve(fixtures, 'testing-bundledeps') - return t.resolveMatchSnapshot(printIdeal(path), 'no missing deps') - .then(() => t.resolveMatchSnapshot(printIdeal(path, { - saveBundle: true, - add: ['@isaacs/testing-bundledeps'], - }), 'add stuff, no missing deps')) + createRegistry(t, true) + await t.resolveMatchSnapshot(printIdeal(path), 'no missing deps') + await t.resolveMatchSnapshot(printIdeal(path, { + saveBundle: true, + add: ['@isaacs/testing-bundledeps'], + }), 'add stuff, no missing deps') }) t.test('bundle deps example 1, complete:true', async t => { // When complete:true is set, we extract into a temp dir to read // the bundled deps, so they ARE included, just like during reify() const path = resolve(fixtures, 'testing-bundledeps-empty') + createRegistry(t, true) // wrap pacote.extract in a spy so we can be sure the integrity and resolved // options both made it through @@ -427,141 +391,149 @@ t.test('bundle deps example 1, complete:true', async t => { return res } - t.matchSnapshot(await printIdeal(path, { + await t.resolveMatchSnapshot(printIdeal(path, { complete: true, }), 'no missing deps, because complete: true') - t.matchSnapshot(await printIdeal(path, { + await t.resolveMatchSnapshot(printIdeal(path, { saveBundle: true, add: ['@isaacs/testing-bundledeps'], complete: true, }), 'no missing deps, because complete: true, add dep, save bundled') }) -t.test('bundle deps example 2', t => { +t.test('bundle deps example 2', async t => { // bundled deps at the root level are NOT ignored when building ideal trees const path = resolve(fixtures, 'testing-bundledeps-2') - return t.resolveMatchSnapshot(printIdeal(path), 'bundle deps testing') - .then(() => t.resolveMatchSnapshot(printIdeal(path, { - saveBundle: true, - add: ['@isaacs/testing-bundledeps-c'], - }), 'add new bundled dep c')) - .then(() => t.resolveMatchSnapshot(printIdeal(path, { - rm: ['@isaacs/testing-bundledeps-a'], - }), 'remove bundled dependency a')) + createRegistry(t, true) + await t.resolveMatchSnapshot(printIdeal(path), 'bundle deps testing') + await t.resolveMatchSnapshot(printIdeal(path, { + saveBundle: true, + add: ['@isaacs/testing-bundledeps-c'], + }), 'add new bundled dep c') + await t.resolveMatchSnapshot(printIdeal(path, { + rm: ['@isaacs/testing-bundledeps-a'], + }), 'remove bundled dependency a') }) -t.test('bundle deps example 2, link', t => { +t.test('bundle deps example 2, link', async t => { // bundled deps at the root level are NOT ignored when building ideal trees const path = resolve(fixtures, 'testing-bundledeps-link') - return t.resolveMatchSnapshot(printIdeal(path), 'bundle deps testing') - .then(() => t.resolveMatchSnapshot(printIdeal(path, { - saveBundle: true, - add: ['@isaacs/testing-bundledeps-c'], - }), 'add new bundled dep 
c')) - .then(() => t.resolveMatchSnapshot(printIdeal(path, { - rm: ['@isaacs/testing-bundledeps-a'], - }), 'remove bundled dependency a')) + createRegistry(t, true) + await t.resolveMatchSnapshot(printIdeal(path), 'bundle deps testing') + await t.resolveMatchSnapshot(printIdeal(path, { + saveBundle: true, + add: ['@isaacs/testing-bundledeps-c'], + }), 'add new bundled dep c') + await t.resolveMatchSnapshot(printIdeal(path, { + rm: ['@isaacs/testing-bundledeps-a'], + }), 'remove bundled dependency a') }) -t.test('unresolvable peer deps', t => { +t.test('unresolvable peer deps', async t => { const path = resolve(fixtures, 'testing-peer-deps-unresolvable') + createRegistry(t, true) - return t.rejects(printIdeal(path, { strictPeerDeps: true }), { + await t.rejects(printIdeal(path, { strictPeerDeps: true }), { message: 'unable to resolve dependency tree', code: 'ERESOLVE', }, 'unacceptable') }) -t.test('do not add shrinkwrapped deps', t => { +t.test('do not add shrinkwrapped deps', async t => { const path = resolve(fixtures, 'shrinkwrapped-dep-no-lock') - return t.resolveMatchSnapshot(printIdeal(path, { update: true })) + createRegistry(t, true) + await t.resolveMatchSnapshot(printIdeal(path, { update: true })) }) -t.test('do add shrinkwrapped deps when complete:true is set', t => { +t.test('do add shrinkwrapped deps when complete:true is set', async t => { const path = resolve(fixtures, 'shrinkwrapped-dep-no-lock') - return t.resolveMatchSnapshot(printIdeal(path, { + createRegistry(t, true) + await t.resolveMatchSnapshot(printIdeal(path, { complete: true, update: true, })) }) -t.test('do not update shrinkwrapped deps', t => { +t.test('do not update shrinkwrapped deps', async t => { const path = resolve(fixtures, 'shrinkwrapped-dep-with-lock') - return t.resolveMatchSnapshot(printIdeal(path, + createRegistry(t, false) + await t.resolveMatchSnapshot(printIdeal(path, { update: { names: ['abbrev'] } })) }) -t.test('do not update shrinkwrapped deps, ignore lockfile', t => { +t.test('do not update shrinkwrapped deps, ignore lockfile', async t => { const path = resolve(fixtures, 'shrinkwrapped-dep-with-lock') - return t.resolveMatchSnapshot(printIdeal(path, + createRegistry(t, true) + await t.resolveMatchSnapshot(printIdeal(path, { packageLock: false, update: { names: ['abbrev'] } })) }) -t.test('do not update shrinkwrapped deps when complete:true is set', t => { +t.test('do not update shrinkwrapped deps when complete:true is set', async t => { const path = resolve(fixtures, 'shrinkwrapped-dep-with-lock') - return t.resolveMatchSnapshot(printIdeal(path, + createRegistry(t, false) + await t.resolveMatchSnapshot(printIdeal(path, { update: { names: ['abbrev'] }, complete: true })) }) -t.test('deduped transitive deps with asymmetrical bin declaration', t => { - const path = - resolve(fixtures, 'testing-asymmetrical-bin-no-lock') - return t.resolveMatchSnapshot(printIdeal(path), 'with no lockfile') +t.test('deduped transitive deps with asymmetrical bin declaration', async t => { + const path = resolve(fixtures, 'testing-asymmetrical-bin-no-lock') + createRegistry(t, true) + await t.resolveMatchSnapshot(printIdeal(path), 'with no lockfile') }) -t.test('deduped transitive deps with asymmetrical bin declaration', t => { - const path = - resolve(fixtures, 'testing-asymmetrical-bin-with-lock') - return t.resolveMatchSnapshot(printIdeal(path), 'with lockfile') +t.test('deduped transitive deps with asymmetrical bin declaration', async t => { + const path = resolve(fixtures, 'testing-asymmetrical-bin-with-lock') 
+ createRegistry(t, false) + await t.resolveMatchSnapshot(printIdeal(path), 'with lockfile') }) -t.test('update', t => { - t.test('flow outdated', t => { +t.test('update', async t => { + await t.test('flow outdated', async t => { + createRegistry(t, true) const flowOutdated = resolve(fixtures, 'flow-outdated') - - t.resolveMatchSnapshot(printIdeal(flowOutdated, { + await t.resolveMatchSnapshot(printIdeal(flowOutdated, { update: { names: ['flow-parser'], }, }), 'update flow parser') - - t.resolveMatchSnapshot(printIdeal(flowOutdated, { + await t.resolveMatchSnapshot(printIdeal(flowOutdated, { update: true, }), 'update everything') - - t.end() }) - t.test('tap and flow', t => { + await t.test('tap and flow', async t => { + createRegistry(t, true) const tapAndFlow = resolve(fixtures, 'tap-and-flow') - t.resolveMatchSnapshot(printIdeal(tapAndFlow, { + await t.resolveMatchSnapshot(printIdeal(tapAndFlow, { update: { all: true, }, }), 'update everything') - t.resolveMatchSnapshot(printIdeal(tapAndFlow, { + await t.resolveMatchSnapshot(printIdeal(tapAndFlow, { update: { names: ['ink'], }, }), 'update ink') - - t.end() }) - - t.end() }) -t.test('link meta deps', t => - t.resolveMatchSnapshot(printIdeal( - resolve(fixtures, 'link-meta-deps-empty')))) +t.test('link meta deps', async t => { + createRegistry(t, true) + await t.resolveMatchSnapshot(printIdeal( + resolve(fixtures, 'link-meta-deps-empty'))) +}) -t.test('respect the yarn.lock file', t => - t.resolveMatchSnapshot(printIdeal( - resolve(fixtures, 'yarn-lock-mkdirp')))) +t.test('respect the yarn.lock file', async t => { + createRegistry(t, true) + await t.resolveMatchSnapshot(printIdeal( + resolve(fixtures, 'yarn-lock-mkdirp'))) +}) -t.test('respect the yarn.lock file version, if lacking resolved', t => - t.resolveMatchSnapshot(printIdeal( - resolve(fixtures, 'yarn-lock-mkdirp-no-resolved')))) +t.test('respect the yarn.lock file version, if lacking resolved', async t => { + createRegistry(t, true) + await t.resolveMatchSnapshot(printIdeal( + resolve(fixtures, 'yarn-lock-mkdirp-no-resolved'))) +}) t.test('optional dependency failures', async t => { const cases = [ @@ -571,34 +543,37 @@ t.test('optional dependency failures', async t => { 'optional-metadep-enotarget', 'optional-metadep-missing', ] - t.plan(cases.length) + createRegistry(t, true) for (const c of cases) { const tree = await printIdeal(resolve(fixtures, c)) t.matchSnapshot(tree, c) } }) -t.test('prod dependency failures', t => { +t.test('prod dependency failures', async t => { const cases = [ 'prod-dep-enotarget', 'prod-dep-missing', ] - t.plan(cases.length) - cases.forEach(c => t.rejects(printIdeal( - resolve(fixtures, c)), c)) + createRegistry(t, true) + for (const c of cases) { + await t.rejects(printIdeal(resolve(fixtures, c)), c) + } }) -t.test('link dep with a link dep', t => { +t.test('link dep with a link dep', async t => { const path = resolve(fixtures, 'cli-750') - return Promise.all([ + createRegistry(t, false) + await Promise.all([ t.resolveMatchSnapshot(printIdeal(path), 'link metadeps with lockfile'), t.resolveMatchSnapshot(printIdeal(path, { update: true }), 'link metadeps without lockfile'), ]) }) -t.test('link dep within node_modules and outside root', t => { +t.test('link dep within node_modules and outside root', async t => { const path = resolve(fixtures, 'external-link-dep') - return Promise.all([ + createRegistry(t, true) + await Promise.all([ t.resolveMatchSnapshot(printIdeal(path), 'linky deps with lockfile'), t.resolveMatchSnapshot(printIdeal(path, { update: 
true }), 'linky deps without lockfile'), t.resolveMatchSnapshot(printIdeal(path, { follow: true }), 'linky deps followed'), @@ -606,27 +581,37 @@ t.test('link dep within node_modules and outside root', t => { ]) }) -t.test('global style', t => t.resolveMatchSnapshot(printIdeal(t.testdir(), { - installStrategy: 'shallow', - add: ['rimraf'], -}))) +t.test('global style', async t => { + createRegistry(t, true) + await t.resolveMatchSnapshot(printIdeal(t.testdir(), { + installStrategy: 'shallow', + add: ['rimraf'], + })) +}) -t.test('global', t => t.resolveMatchSnapshot(printIdeal(t.testdir(), { - global: true, - add: ['rimraf'], -}))) +t.test('global', async t => { + createRegistry(t, true) + await t.resolveMatchSnapshot(printIdeal(t.testdir(), { + global: true, + add: ['rimraf'], + })) +}) -t.test('global has to add or remove', t => t.rejects(printIdeal(t.testdir(), { - global: true, -}))) +t.test('global has to add or remove', async t => { + createRegistry(t, false) + await t.rejects(printIdeal(t.testdir(), { + global: true, + })) +}) -// somewhat copy-pasta from the test/arborist/audit.js to exercise -// the buildIdealTree code paths +// // somewhat copy-pasta from the test/arborist/audit.js to exercise +// // the buildIdealTree code paths t.test('update mkdirp to non-minimist-using version', async t => { const path = resolve(fixtures, 'deprecated-dep') - t.teardown(auditResponse(resolve(fixtures, 'audit-nyc-mkdirp/audit.json'))) + const registry = createRegistry(t, true) + registry.audit({ convert: true, results: require('../fixtures/audit-nyc-mkdirp/audit.json') }) - const arb = new Arborist({ path, ...OPT }) + const arb = newArb(path) await arb.audit() t.matchSnapshot(printTree(await arb.buildIdealTree())) @@ -634,14 +619,11 @@ t.test('update mkdirp to non-minimist-using version', async t => { t.test('force a new nyc (and update mkdirp nicely)', async t => { const path = resolve(fixtures, 'audit-nyc-mkdirp') - t.teardown(auditResponse(resolve(fixtures, 'audit-nyc-mkdirp/audit.json'))) - - const arb = new Arborist({ - force: true, - path, - ...OPT, - }) + const registry = createRegistry(t, true) + // TODO why does this infinite loop if no results? 
+ registry.audit({ convert: true, results: require('../fixtures/audit-nyc-mkdirp/audit.json') }) + const arb = newArb(path, { force: true }) await arb.audit() t.matchSnapshot(printTree(await arb.buildIdealTree())) t.equal(arb.idealTree.children.get('mkdirp').package.version, '0.5.5') @@ -650,13 +632,10 @@ t.test('force a new nyc (and update mkdirp nicely)', async t => { t.test('force a new mkdirp (but not semver major)', async t => { const path = resolve(fixtures, 'mkdirp-pinned') - t.teardown(auditResponse(resolve(fixtures, 'audit-nyc-mkdirp/audit.json'))) + const registry = createRegistry(t, true) + registry.audit({ convert: true, results: require('../fixtures/audit-nyc-mkdirp/audit.json') }) - const arb = new Arborist({ - force: true, - path, - ...OPT, - }) + const arb = newArb(path, { force: true }) await arb.audit() t.matchSnapshot(printTree(await arb.buildIdealTree())) @@ -665,6 +644,7 @@ t.test('force a new mkdirp (but not semver major)', async t => { }) t.test('empty update should not trigger old lockfile', async t => { + createRegistry(t, false) const path = t.testdir({ 'package.json': JSON.stringify({ name: 'empty-update', @@ -683,15 +663,16 @@ t.test('empty update should not trigger old lockfile', async t => { }, }), }) - const checkLogs = warningTracker() + const warnings = warningTracker(t) const arb = newArb(path) await arb.reify({ update: true }) - t.strictSame(checkLogs(), []) + t.strictSame(warnings, []) }) t.test('update v3 doesnt downgrade lockfile', async t => { + createRegistry(t, false) const fixt = t.testdir({ 'package-lock.json': JSON.stringify({ name: 'empty-update-v3', @@ -721,19 +702,16 @@ t.test('update v3 doesnt downgrade lockfile', async t => { t.test('no fix available', async t => { const path = resolve(fixtures, 'audit-mkdirp/mkdirp-unfixable') - const checkLogs = warningTracker() - t.teardown(auditResponse(resolve(path, 'audit.json'))) + const warnings = warningTracker(t) + const registry = createRegistry(t, true) + registry.audit({ convert: true, results: require(resolve(path, 'audit.json')) }) - const arb = new Arborist({ - force: true, - path, - ...OPT, - }) + const arb = newArb(path, { force: true }) await arb.audit() t.matchSnapshot(printTree(await arb.buildIdealTree())) t.equal(arb.idealTree.children.get('mkdirp').package.version, '0.5.1') - t.match(checkLogs(), [ + t.match(warnings, [ oldLockfileWarning, ['warn', 'audit', 'No fix available for mkdirp@*'], ]) @@ -741,18 +719,15 @@ t.test('no fix available', async t => { t.test('no fix available, linked top package', async t => { const path = resolve(fixtures, 'audit-mkdirp') - const checkLogs = warningTracker() - t.teardown(auditResponse(resolve(path, 'mkdirp-unfixable/audit.json'))) + const warnings = warningTracker(t) + const registry = createRegistry(t, true) + registry.audit({ convert: true, results: require(resolve(path, 'mkdirp-unfixable', 'audit.json')) }) - const arb = new Arborist({ - force: true, - path, - ...OPT, - }) + const arb = newArb(path, { force: true }) await arb.audit() t.matchSnapshot(printTree(await arb.buildIdealTree())) - t.strictSame(checkLogs(), [ + t.strictSame(warnings, [ oldLockfileWarning, ['warn', 'audit', 'Manual fix required in linked project at ./mkdirp-unfixable for mkdirp@*.\n' + @@ -760,71 +735,84 @@ t.test('no fix available, linked top package', async t => { ]]) }) -t.test('workspaces', t => { - t.test('should install a simple example', t => { +t.test('workspaces', async t => { + await t.test('should install a simple example', async t => { + createRegistry(t, 
false) const path = resolve(__dirname, '../fixtures/workspaces-simple') - return t.resolveMatchSnapshot(printIdeal(path)) + await t.resolveMatchSnapshot(printIdeal(path)) }) - t.test('should update a simple example', t => { + await t.test('should update a simple example', async t => { + createRegistry(t, false) const path = resolve(__dirname, '../fixtures/workspaces-simple') - return t.resolveMatchSnapshot(printIdeal(path, { update: { all: true } })) + await t.resolveMatchSnapshot(printIdeal(path, { update: { all: true } })) }) - t.test('should install a simple scoped pkg example', t => { + await t.test('should install a simple scoped pkg example', async t => { + createRegistry(t, false) const path = resolve(__dirname, '../fixtures/workspaces-scoped-pkg') - return t.resolveMatchSnapshot(printIdeal(path)) + await t.resolveMatchSnapshot(printIdeal(path)) }) - t.test('should not work with duplicate names', t => { + await t.test('should not work with duplicate names', async t => { + createRegistry(t, false) const path = resolve(__dirname, '../fixtures/workspaces-duplicate') - return t.rejects(printIdeal(path), { code: 'EDUPLICATEWORKSPACE' }, 'throws EDUPLICATEWORKSPACE error') + await t.rejects(printIdeal(path), { code: 'EDUPLICATEWORKSPACE' }, 'throws EDUPLICATEWORKSPACE error') }) - t.test('should install shared dependencies into root folder', t => { + await t.test('should install shared dependencies into root folder', async t => { + createRegistry(t, true) const path = resolve(__dirname, '../fixtures/workspaces-shared-deps') - return t.resolveMatchSnapshot(printIdeal(path)) + await t.resolveMatchSnapshot(printIdeal(path)) }) - t.test('should install conflicting dep versions', t => { + await t.test('should install conflicting dep versions', async t => { + createRegistry(t, true) const path = resolve(__dirname, '../fixtures/workspaces-conflicting-versions') - return t.resolveMatchSnapshot(printIdeal(path)) + await t.resolveMatchSnapshot(printIdeal(path)) }) - t.test('should prefer linking nested workspaces', t => { + await t.test('should prefer linking nested workspaces', async t => { + createRegistry(t, false) const path = resolve(__dirname, '../fixtures/workspaces-prefer-linking') - return t.resolveMatchSnapshot(printIdeal(path)) + await t.resolveMatchSnapshot(printIdeal(path)) }) - t.test('should install from registry on version not satisfied', t => { + await t.test('should install from registry on version not satisfied', async t => { + createRegistry(t, true) const path = resolve(__dirname, '../fixtures/workspaces-version-unsatisfied') - return t.resolveMatchSnapshot(printIdeal(path)) + await t.resolveMatchSnapshot(printIdeal(path)) }) - t.test('should link top level nested workspaces', t => { + await t.test('should link top level nested workspaces', async t => { + createRegistry(t, false) const path = resolve(__dirname, '../fixtures/workspaces-top-level-link') - return t.resolveMatchSnapshot(printIdeal(path)) + await t.resolveMatchSnapshot(printIdeal(path)) }) - t.test('should install workspace transitive dependencies', t => { + await t.test('should install workspace transitive dependencies', async t => { + createRegistry(t, true) const path = resolve(__dirname, '../fixtures/workspaces-transitive-deps') - return t.resolveMatchSnapshot(printIdeal(path)) + await t.resolveMatchSnapshot(printIdeal(path)) }) - t.test('should ignore nested node_modules folders', t => { + await t.test('should ignore nested node_modules folders', async t => { // packages/a/node_modules/nested-workspaces should 
not be installed + createRegistry(t, false) const path = resolve(__dirname, '../fixtures/workspaces-ignore-nm') - return t.resolveMatchSnapshot(printIdeal(path)) + await t.resolveMatchSnapshot(printIdeal(path)) }) - t.test('should work with files spec', t => { + await t.test('should work with files spec', async t => { + createRegistry(t, false) const path = resolve(__dirname, '../fixtures/workspaces-with-files-spec') - return t.resolveMatchSnapshot(printIdeal(path)) + await t.resolveMatchSnapshot(printIdeal(path)) }) - t.test('should handle conflicting peer deps ranges', t => { + await t.test('should handle conflicting peer deps ranges', async t => { + createRegistry(t, true) const path = resolve(__dirname, '../fixtures/workspaces-peer-ranges') - return t.rejects( + await t.rejects( printIdeal(path), { code: 'ERESOLVE', @@ -833,10 +821,8 @@ t.test('workspaces', t => { ) }) - t.test('should allow adding a workspace as a dep to a workspace', async t => { - // turn off networking, this should never make a registry request - nock.disableNetConnect() - t.teardown(() => nock.enableNetConnect()) + await t.test('should allow adding a workspace as a dep to a workspace', async t => { + createRegistry(t, false) const path = t.testdir({ 'package.json': JSON.stringify({ @@ -857,11 +843,7 @@ t.test('workspaces', t => { }, }) - const arb = new Arborist({ - ...OPT, - path, - workspaces: ['workspace-a'], - }) + const arb = newArb(path, { workspaces: ['workspace-a'] }) const tree = arb.buildIdealTree({ path, @@ -877,13 +859,15 @@ t.test('workspaces', t => { t.matchSnapshot(printTree(await tree)) }) - t.test('should allow cyclic peer dependencies between workspaces and packages from a repository', async t => { - generateNocks(t, { - foo: { - versions: ['1.0.0'], - peerDependencies: ['workspace-a'], - }, + await t.test('should allow cyclic peer dependencies between workspaces and packages from a repository', async t => { + const registry = createRegistry(t, false) + const packument = registry.packument({ + name: 'foo', + version: '1.0.0', + peerDependencies: { 'workspace-a': '1.0.0' }, }) + const manifest = registry.manifest({ name: 'foo', packuments: [packument] }) + await registry.package({ manifest }) const path = t.testdir({ 'package.json': JSON.stringify({ name: 'root', @@ -903,31 +887,13 @@ t.test('workspaces', t => { }, }) - const arb = new Arborist({ - ...OPT, - path, - workspaces: ['workspace-a'], - }) - - const tree = arb.buildIdealTree({ - path, - add: [ - 'foo', - ], - }) - - // just assert that the buildIdealTree call resolves, if there's a - // problem here it will reject because of nock disabling requests - await t.resolves(tree) - - t.matchSnapshot(printTree(await tree)) + const arb = newArb(path, { workspaces: ['workspace-a'] }) + const tree = await arb.buildIdealTree({ path, add: ['foo'] }) + t.matchSnapshot(printTree(tree)) }) t.test('workspace nodes are used instead of fetching manifests when they are valid', async t => { - // turn off networking, this should never make a registry request - nock.disableNetConnect() - t.teardown(() => nock.enableNetConnect()) - + createRegistry(t, false) const path = t.testdir({ 'package.json': JSON.stringify({ name: 'root', @@ -999,11 +965,7 @@ t.test('workspaces', t => { }, }) - const arb = new Arborist({ - ...OPT, - path, - workspaces: ['workspace-a', 'workspace-b'], - }) + const arb = newArb(path, { workspaces: ['workspace-a', 'workspace-b'] }) // this will reject if we try to fetch a manifest for some reason const tree = await arb.buildIdealTree({ @@ 
-1021,11 +983,10 @@ t.test('workspaces', t => { const nodeBfromA = nodeA.edgesOut.get('workspace-b').to.target t.equal(nodeBfromA, nodeB, 'workspace-b edgeOut from workspace-a is the workspace') }) - - t.end() }) t.test('adding tarball to global prefix that is a symlink at a different path depth', async t => { + createRegistry(t, false) const fixt = t.testdir({ 'real-root': {}, 'another-path': { @@ -1033,11 +994,7 @@ t.test('adding tarball to global prefix that is a symlink at a different path de }, }) const path = resolve(fixt, 'another-path/global-root') - const arb = new Arborist({ - path, - global: true, - ...OPT, - }) + const arb = newArb(path, { global: true }) const tarballpath = resolve(__dirname, '../fixtures/registry-mocks/content/mkdirp/-/mkdirp-1.0.2.tgz') const tree = await arb.buildIdealTree({ @@ -1052,6 +1009,7 @@ t.test('adding tarball to global prefix that is a symlink at a different path de }) t.test('add symlink that points to a symlink', t => { + createRegistry(t, false) const fixt = t.testdir({ 'global-prefix': { lib: { @@ -1071,10 +1029,7 @@ t.test('add symlink that points to a symlink', t => { }, }) const path = resolve(fixt, 'my-project') - const arb = new Arborist({ - path, - ...OPT, - }) + const arb = newArb(path) return arb.buildIdealTree({ add: [ // simulates the string used by `npm link ` when @@ -1089,7 +1044,8 @@ t.test('add symlink that points to a symlink', t => { ) }) -t.test('update global space single dep', t => { +t.test('update global space single dep', async t => { + createRegistry(t, true) const fixt = t.testdir({ 'global-prefix': { lib: { @@ -1109,19 +1065,18 @@ t.test('update global space single dep', t => { path, global: true, update: true, - ...OPT, } - const arb = new Arborist(opts) - return arb.buildIdealTree(opts).then(tree => - t.matchSnapshot( - printTree(tree), - 'should update global dependencies' - ) + const arb = newArb(path, opts) + const tree = await arb.buildIdealTree(opts) + t.matchSnapshot( + printTree(tree), + 'should update global dependencies' ) }) // if we get this wrong, it'll spin forever and use up all the memory t.test('pathologically nested dependency cycle', async t => { + createRegistry(t, true) t.matchSnapshot(await printIdeal( resolve(fixtures, 'pathological-dep-nesting-cycle'))) }) @@ -1129,17 +1084,14 @@ t.test('pathologically nested dependency cycle', async t => { t.test('resolve file deps from cwd', async t => { const cwd = process.cwd() t.teardown(() => process.chdir(cwd)) + createRegistry(t, false) const path = t.testdir({ global: {}, local: {}, }) const fixturedir = resolve(fixtures, 'root-bundler') process.chdir(fixturedir) - const arb = new Arborist({ - global: true, - path: resolve(path, 'global'), - ...OPT, - }) + const arb = newArb(resolve(path, 'global'), { global: true }) const tree = await arb.buildIdealTree({ path: `${path}/local`, add: ['child-1.2.3.tgz'], @@ -1152,6 +1104,7 @@ t.test('resolve file deps from cwd', async t => { t.test('resolve links in global mode', async t => { const cwd = process.cwd() t.teardown(() => process.chdir(cwd)) + createRegistry(t, false) const path = t.testdir({ global: {}, lib: { @@ -1167,11 +1120,7 @@ t.test('resolve links in global mode', async t => { const fixturedir = resolve(path, 'lib', 'my-project') process.chdir(fixturedir) - const arb = new Arborist({ - ...OPT, - global: true, - path: resolve(path, 'global'), - }) + const arb = newArb(resolve(path, 'global'), { global: true }) const tree = await arb.buildIdealTree({ add: ['file:../../linked-dep'], global: true, @@ 
-1181,19 +1130,21 @@ t.test('resolve links in global mode', async t => { }) t.test('dont get confused if root matches duped metadep', async t => { + createRegistry(t, true) const path = resolve(fixtures, 'test-root-matches-metadep') - const arb = new Arborist({ path, installStrategy: 'hoisted', ...OPT }) + const arb = newArb(path, { installStrategy: 'hoisted' }) const tree = await arb.buildIdealTree() t.matchSnapshot(printTree(tree)) }) t.test('inflate an ancient lockfile by hitting the registry', async t => { - const checkLogs = warningTracker() + const warnings = warningTracker(t) + createRegistry(t, true) const path = resolve(fixtures, 'sax') - const arb = new Arborist({ path, ...OPT }) + const arb = newArb(path) const tree = await arb.buildIdealTree() t.matchSnapshot(printTree(tree)) - t.strictSame(checkLogs(), [ + t.strictSame(warnings, [ [ 'warn', 'ancient lockfile', @@ -1208,12 +1159,12 @@ This is a one-time fix-up, please be patient... }) t.test('inflating a link node in an old lockfile skips registry', async t => { - const checkLogs = warningTracker() + const warnings = warningTracker(t) const path = resolve(fixtures, 'old-lock-with-link') - const arb = new Arborist({ path, ...OPT, registry: 'http://invalid.host' }) + const arb = newArb(path, { registry: 'http://invalid.host' }) const tree = await arb.buildIdealTree() t.matchSnapshot(printTree(tree)) - t.strictSame(checkLogs(), [ + t.strictSame(warnings, [ [ 'warn', 'old lockfile', @@ -1228,12 +1179,13 @@ This is a one-time fix-up, please be patient... }) t.test('warn for ancient lockfile, even if we use v1', async t => { - const checkLogs = warningTracker() + const warnings = warningTracker(t) const path = resolve(fixtures, 'sax') - const arb = new Arborist({ path, lockfileVersion: 1, ...OPT }) + createRegistry(t, true) + const arb = newArb(path, { lockfileVersion: 1 }) const tree = await arb.buildIdealTree() t.matchSnapshot(printTree(tree)) - t.strictSame(checkLogs(), [ + t.strictSame(warnings, [ [ 'warn', 'ancient lockfile', @@ -1248,21 +1200,23 @@ This is a one-time fix-up, please be patient... }) t.test('no old lockfile warning if we write back v1', async t => { - const checkLogs = warningTracker() + const warnings = warningTracker(t) const path = resolve(fixtures, 'old-package-lock') - const arb = new Arborist({ path, lockfileVersion: 1, ...OPT }) + createRegistry(t, true) + const arb = newArb(path, { lockfileVersion: 1 }) const tree = await arb.buildIdealTree() t.matchSnapshot(printTree(tree)) - t.strictSame(checkLogs(), []) + t.strictSame(warnings, []) }) t.test('inflate an ancient lockfile with a dep gone missing', async t => { - const checkLogs = warningTracker() + const warnings = warningTracker(t) const path = resolve(fixtures, 'ancient-lockfile-invalid') - const arb = new Arborist({ path, ...OPT }) + createRegistry(t, true) + const arb = newArb(path) const tree = await arb.buildIdealTree() t.matchSnapshot(printTree(tree)) - t.match(checkLogs(), [ + t.match(warnings, [ [ 'warn', 'ancient lockfile', @@ -1283,12 +1237,13 @@ This is a one-time fix-up, please be patient... 
}) t.test('complete build for project with old lockfile', async t => { - const checkLogs = warningTracker() + const warnings = warningTracker(t) + createRegistry(t, false) const path = resolve(fixtures, 'link-dep-empty') - const arb = new Arborist({ path, ...OPT }) + const arb = newArb(path) const tree = await arb.buildIdealTree({ complete: true }) t.matchSnapshot(printTree(tree)) - t.match(checkLogs(), [ + t.match(warnings, [ oldLockfileWarning, ]) }) @@ -1310,9 +1265,11 @@ t.test('no old lockfile warning with no package-lock', async t => { }, }), }) - const checkLogs = warningTracker() + const warnings = warningTracker(t) + const registry = createRegistry(t, false) + registry.audit() await newArb(fixt).reify() - t.strictSame(checkLogs(), []) + t.strictSame(warnings, []) }) t.test('no old lockfile warning with a conflict package-lock', async t => { @@ -1335,9 +1292,11 @@ t.test('no old lockfile warning with a conflict package-lock', async t => { resolve(fixtures, 'conflict-package-lock/package-lock.json') ), }) - const checkLogs = warningTracker() + const warnings = warningTracker(t) + const registry = createRegistry(t, false) + registry.audit() await newArb(fixt).reify() - t.strictSame(checkLogs(), []) + t.strictSame(warnings, []) }) t.test('override a conflict with the root dep (with force)', async t => { @@ -1346,6 +1305,7 @@ t.test('override a conflict with the root dep (with force)', async t => { await t.rejects(() => buildIdeal(path), { code: 'ERESOLVE', }) + createRegistry(t, false) t.matchSnapshot(await printIdeal(path, { strictPeerDeps: true, force: true }), 'strict and force override') t.matchSnapshot(await printIdeal(path, { strictPeerDeps: false, force: true }), 'non-strict and force override') }) @@ -1355,12 +1315,14 @@ t.test('override a conflict with the root peer dep (with force)', async t => { await t.rejects(() => buildIdeal(path, { strictPeerDeps: true }), { code: 'ERESOLVE', }) + createRegistry(t, false) t.matchSnapshot(await printIdeal(path, { strictPeerDeps: true, force: true }), 'strict and force override') t.matchSnapshot(await printIdeal(path, { strictPeerDeps: false, force: true }), 'non-strict and force override') }) t.test('push conflicted peer deps deeper in to the tree to solve', async t => { const path = resolve(fixtures, 'testing-peer-dep-conflict-chain/override-dep') + createRegistry(t, true) t.matchSnapshot(await printIdeal(path)) }) @@ -1394,13 +1356,14 @@ t.test('do not continually re-resolve deps that failed to load', async t => { }, }), }) - const arb = new Arborist({ ...OPT, path }) - t.rejects(() => arb.buildIdealTree({ add: [ - '@isaacs/this-does-not-exist-actually@2.x', - ] }), { code: 'E404' }) + createRegistry(t, true) + const arb = newArb(path) + await t.rejects(() => arb.buildIdealTree({ + add: ['@isaacs/this-does-not-exist-actually@2.x'], + }), { code: 'E404' }) }) -t.test('update a node if its bundled by the root project', async t => { +t.test('update a node if it is bundled by the root project', async t => { const path = t.testdir({ node_modules: { abbrev: { @@ -1417,12 +1380,13 @@ t.test('update a node if its bundled by the root project', async t => { }, }), }) - const arb = new Arborist({ ...OPT, path }) + createRegistry(t, true) + const arb = newArb(path) await arb.buildIdealTree({ update: ['abbrev'] }) t.equal(arb.idealTree.children.get('abbrev').version, '1.1.1') }) -t.test('more peer dep conflicts', t => { +t.test('more peer dep conflicts', async t => { // each of these is installed and should pass in force mode, // fail in 
strictPeerDeps mode, and pass/fail based on the // 'error' field in non-strict/non-forced mode. @@ -1693,13 +1657,9 @@ t.test('more peer dep conflicts', t => { }, }) - if (process.platform !== 'win32') { - t.jobs = cases.length - } - t.plan(cases.length) - + createRegistry(t, true) for (const [name, { pkg, error, resolvable, add }] of cases) { - t.test(name, { buffer: true }, async t => { + await t.test(name, { buffer: true }, async t => { const path = t.testdir({ 'package.json': JSON.stringify(pkg), }) @@ -1712,9 +1672,9 @@ t.test('more peer dep conflicts', t => { error: () => {}, warn: (...msg) => warnings.push(normalizePaths(msg)), } - const strict = new Arborist({ ...OPT, path, strictPeerDeps: true }) - const force = new Arborist({ ...OPT, path, force: true }) - const def = new Arborist({ ...OPT, path, log }) + const strict = newArb(path, { strictPeerDeps: true }) + const force = newArb(path, { force: true }) + const def = newArb(path, { log }) // cannot do this in parallel on Windows machines, or it // crashes in CI with an EBUSY error when it tries to read @@ -1768,14 +1728,14 @@ t.test('more peer dep conflicts', t => { } }) -t.test('cases requiring peer sets to be nested', t => { +t.test('cases requiring peer sets to be nested', async t => { const cases = [ 'multi', 'simple', ] - t.plan(cases.length) for (const c of cases) { - t.test(c, async t => { + await t.test(c, async t => { + createRegistry(t, true) const path = resolve(`${fixtures}/testing-peer-dep-nesting/${c}`) t.matchSnapshot(await printIdeal(path)) }) @@ -1787,13 +1747,13 @@ t.test('make sure yargs works', async t => { // tests faster and force us to fully understand a problem, yargs has // been a bountiful source of complicated eslint peerDep issues. const yargs = resolve(fixtures, 'yargs') + createRegistry(t, true) t.matchSnapshot(await printIdeal(yargs), 'yargs should build fine') }) -t.test('allow updating when peer outside of explicit update set', t => { +t.test('allow updating when peer outside of explicit update set', async t => { // see https://github.com/npm/cli/issues/2000 - t.plan(2) - t.test('valid, no force required', async t => { + await t.test('valid, no force required', async t => { const path = t.testdir({ 'package.json': JSON.stringify({ name: 'x', @@ -1929,6 +1889,7 @@ t.test('allow updating when peer outside of explicit update set', t => { }, }), }) + createRegistry(t, true) t.matchSnapshot(await printIdeal(path, { add: [ '@isaacs/testing-peer-dep-conflict-chain-single-a@2', @@ -1937,7 +1898,7 @@ t.test('allow updating when peer outside of explicit update set', t => { })) }) - t.test('conflict, but resolves appropriately with --force', async t => { + await t.test('conflict, but resolves appropriately with --force', async t => { const path = t.testdir({ 'package.json': JSON.stringify({ name: 'x', @@ -2072,6 +2033,7 @@ t.test('allow updating when peer outside of explicit update set', t => { }, }), }) + createRegistry(t, true) t.matchSnapshot(await printIdeal(path, { force: true, add: [ @@ -2090,6 +2052,7 @@ t.test('allow updating when peer outside of explicit update set', t => { t.test('carbonium eslint conflicts', async t => { const path = resolve(fixtures, 'carbonium') + createRegistry(t, true) t.matchSnapshot(await printIdeal(path, { add: [ '@typescript-eslint/eslint-plugin@4', @@ -2100,7 +2063,8 @@ t.test('carbonium eslint conflicts', async t => { t.test('peerOptionals that are devDeps or explicit request', async t => { const path = resolve(fixtures, 'peer-optional-installs') - const arb = new 
Arborist({ path, ...OPT }) + createRegistry(t, true) + const arb = newArb(path) const tree = await arb.buildIdealTree({ add: ['abbrev'] }) t.matchSnapshot(printTree(tree), 'should install the abbrev dep') t.ok(tree.children.get('abbrev'), 'should install abbrev dep') @@ -2128,7 +2092,8 @@ t.test('weird thing when theres a link to ..', async t => { }), }, }) + '/y' - const arb = new Arborist({ path, ...OPT }) + createRegistry(t, false) + const arb = newArb(path) const tree = await arb.buildIdealTree() t.equal(tree.children.get('x').target.fsParent, null) }) @@ -2145,6 +2110,7 @@ t.test('always prefer deduping peer deps', async t => { }, }), }) + createRegistry(t, true) t.matchSnapshot(await printIdeal(path)) }) @@ -2162,7 +2128,8 @@ t.test('do not ever nest peer deps underneath their dependent ever', async t => }, }), }) - t.rejects(printIdeal(path), { code: 'ERESOLVE' }) + createRegistry(t, true) + await t.rejects(printIdeal(path), { code: 'ERESOLVE' }) }) t.test('properly fail on conflicted peerOptionals', async t => { @@ -2178,12 +2145,14 @@ t.test('properly fail on conflicted peerOptionals', async t => { }, }), }) + createRegistry(t, true) await t.rejects(printIdeal(path), { code: 'ERESOLVE' }) }) t.test('properly assign fsParent when paths have .. in them', async t => { const path = resolve(fixtures, 'fs-parent-dots/x/y/z') - const arb = new Arborist({ ...OPT, path }) + createRegistry(t, false) + const arb = newArb(path) const tree = await arb.buildIdealTree() t.matchSnapshot(printTree(tree)) for (const child of tree.children.values()) { @@ -2256,6 +2225,7 @@ t.test('update global', async t => { }, }) + createRegistry(t, true) t.matchSnapshot(await printIdeal(path, { global: true, update: ['abbrev'] }), 'updating missing dep should have no effect, but fix the invalid node') @@ -2270,7 +2240,7 @@ t.test('update global', async t => { 'once@2', ] for (const updateName of invalidArgs) { - t.rejects( + await t.rejects( printIdeal(path, { global: true, update: [updateName] }), { code: 'EUPDATEARGS' }, 'should throw an error when using semver ranges' @@ -2290,6 +2260,7 @@ t.test('update global when nothing in global', async t => { node_modules: {}, }, }) + createRegistry(t, false) const opts = { global: true, update: true } t.matchSnapshot(await printIdeal(path + '/no_nm', opts), 'update without node_modules') @@ -2310,6 +2281,7 @@ t.test('peer dep that needs to be replaced', async t => { }, }), }) + createRegistry(t, true) t.matchSnapshot(await printIdeal(path)) }) @@ -2324,11 +2296,10 @@ t.test('transitive conflicted peer dependency', async t => { }, }), }) - const strict = { strictPeerDeps: true } - const force = { force: true } + createRegistry(t, true) t.matchSnapshot(await printIdeal(path)) - t.matchSnapshot(await printIdeal(path, force)) - await t.rejects(printIdeal(path, strict), { code: 'ERESOLVE' }) + t.matchSnapshot(await printIdeal(path, { force: true })) + await t.rejects(printIdeal(path, { strictPeerDeps: true }), { code: 'ERESOLVE' }) }) t.test('remove deps when initializing tree from actual tree', async t => { @@ -2343,17 +2314,18 @@ t.test('remove deps when initializing tree from actual tree', async t => { }, }) - const arb = new Arborist({ path, ...OPT }) + createRegistry(t, false) + const arb = newArb(path) const tree = await arb.buildIdealTree({ rm: ['foo'] }) t.equal(tree.children.get('foo'), undefined, 'removed foo child') }) -t.test('detect conflicts in transitive peerOptional deps', t => { - t.plan(2) +t.test('detect conflicts in transitive peerOptional deps', async t 
=> { const base = resolve(fixtures, 'test-conflicted-optional-peer-dep') - t.test('nest when peerOptional conflicts', async t => { + await t.test('nest when peerOptional conflicts', async t => { const path = resolve(base, 'nest-peer-optional') + createRegistry(t, true) const tree = await buildIdeal(path) t.matchSnapshot(printTree(tree)) const name = '@isaacs/test-conflicted-optional-peer-dep-peer' @@ -2361,8 +2333,9 @@ t.test('detect conflicts in transitive peerOptional deps', t => { t.equal(peers.size, 2, 'installed the peer dep twice to avoid conflict') }) - t.test('omit peerOptionals when not needed for conflicts', async t => { + await t.test('omit peerOptionals when not needed for conflicts', async t => { const path = resolve(base, 'omit-peer-optional') + createRegistry(t, true) const tree = await buildIdeal(path) t.matchSnapshot(printTree(tree)) const name = '@isaacs/test-conflicted-optional-peer-dep-peer' @@ -2373,6 +2346,7 @@ t.test('detect conflicts in transitive peerOptional deps', t => { t.test('do not fail if root peerDep looser than meta peerDep', async t => { const path = resolve(fixtures, 'test-peer-looser-than-dev') + createRegistry(t, true) t.matchSnapshot(await printIdeal(path)) }) @@ -2392,6 +2366,7 @@ t.test('adding existing dep with updateable version in package.json', async t => }), }) + createRegistry(t, true) t.matchSnapshot(await printIdeal(path, { add: ['lodash'] })) }) @@ -2409,8 +2384,9 @@ t.test('set the current on ERESOLVE triggered by devDeps', async t => { }), }) - const arb = new Arborist({ path, ...OPT }) - t.rejects(arb.buildIdealTree(), { + createRegistry(t, true) + const arb = newArb(path) + await t.rejects(arb.buildIdealTree(), { code: 'ERESOLVE', current: { name: 'eslint', @@ -2425,8 +2401,8 @@ t.test('set the current on ERESOLVE triggered by devDeps', async t => { }) }) -t.test('shrinkwrapped dev/optional deps should not clobber flags', t => { - t.test('optional', async t => { +t.test('shrinkwrapped dev/optional deps should not clobber flags', async t => { + await t.test('optional', async t => { const path = t.testdir({ 'package.json': JSON.stringify({ name: 'project', @@ -2436,6 +2412,7 @@ t.test('shrinkwrapped dev/optional deps should not clobber flags', t => { }, }), }) + createRegistry(t, true) const tree = await buildIdeal(path, { complete: true }) const swName = '@isaacs/test-package-with-shrinkwrap' const swDep = tree.children.get(swName) @@ -2448,7 +2425,7 @@ t.test('shrinkwrapped dev/optional deps should not clobber flags', t => { t.equal(metaDep.dev, false, 'meta dep is not dev') }) - t.test('dev', async t => { + await t.test('dev', async t => { const path = t.testdir({ 'package.json': JSON.stringify({ name: 'project', @@ -2458,6 +2435,7 @@ t.test('shrinkwrapped dev/optional deps should not clobber flags', t => { }, }), }) + createRegistry(t, true) const tree = await buildIdeal(path, { complete: true }) const swName = '@isaacs/test-package-with-shrinkwrap' const swDep = tree.children.get(swName) @@ -2469,18 +2447,16 @@ t.test('shrinkwrapped dev/optional deps should not clobber flags', t => { t.equal(swDep.optional, false, 'sw dep is not optional') t.equal(metaDep.optional, false, 'meta dep is not optional') }) - - t.end() }) -t.test('do not ERESOLVE on peerOptionals that are ignored anyway', t => { +t.test('do not ERESOLVE on peerOptionals that are ignored anyway', async t => { // this simulates three cases where a conflict occurs during the peerSet // generation phase, but will not manifest in the tree building phase. 
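Each of the cases below calls createRegistry(t, true) before building the ideal tree. The helper itself lives near the top of this file and is not visible in these hunks, but judging from the equivalent helper added to reify.js at the end of this patch, it is presumably shaped like this sketch:

```js
// Assumed shape of the createRegistry(t, mocks) helper used throughout these
// tests; the reify.js helper later in this patch follows the same pattern.
const { join } = require('node:path')
const MockRegistry = require('@npmcli/mock-registry')

const createRegistry = (t, mocks) => {
  // strict: any request that has no matching mock fails the test
  const registry = new MockRegistry({
    strict: true,
    tap: t,
    registry: 'https://registry.npmjs.org',
  })
  if (mocks) {
    // serve packuments and tarballs from the checked-in registry fixtures
    registry.mocks({ dir: join(__dirname, '..', 'fixtures') })
  }
  return registry
}
```

Passing false replaces the old nock.disableNetConnect() dance: a strict mock registry with nothing mounted fails the test as soon as anything tries to reach the network.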
const base = resolve(fixtures, 'peer-optional-eresolve') const cases = ['a', 'b', 'c', 'd', 'e', 'f'] - t.plan(cases.length) for (const c of cases) { - t.test(`case ${c}`, async t => { + await t.test(`case ${c}`, async t => { + createRegistry(t, true) const path = resolve(base, c) t.matchSnapshot(await printIdeal(path)) }) @@ -2497,8 +2473,7 @@ t.test('allow ERESOLVE to be forced when not in the source', async t => { // in these tests, the deps are both of the same type. b has a peerOptional // dep on peer, and peer is a direct dependency of the root. - t.test('both direct and peer of the same type', t => { - t.plan(types.length) + await t.test('both direct and peer of the same type', async t => { const pj = type => ({ name: '@isaacs/conflicted-peer-optional-from-dev-dep', version: '1.2.3', @@ -2509,10 +2484,11 @@ t.test('allow ERESOLVE to be forced when not in the source', async t => { }) for (const type of types) { - t.test(type, async t => { + await t.test(type, async t => { const path = t.testdir({ 'package.json': JSON.stringify(pj(type)), }) + createRegistry(t, true) t.matchSnapshot(await printIdeal(path, { force: true }), 'use the force') t.rejects(printIdeal(path), { code: 'ERESOLVE' }, 'no force') }) @@ -2520,7 +2496,7 @@ t.test('allow ERESOLVE to be forced when not in the source', async t => { }) // in these, the peer is a peer dep of the root, and b is a different type - t.test('peer is peer, b is some other type', t => { + await t.test('peer is peer, b is some other type', async t => { t.plan(types.length - 1) const pj = type => ({ name: '@isaacs/conflicted-peer-optional-from-dev-dep', @@ -2536,10 +2512,11 @@ t.test('allow ERESOLVE to be forced when not in the source', async t => { if (type === 'peerDependencies') { continue } - t.test(type, async t => { + await t.test(type, async t => { const path = t.testdir({ 'package.json': JSON.stringify(pj(type)), }) + createRegistry(t, true) t.matchSnapshot(await printIdeal(path, { force: true }), 'use the force') t.rejects(printIdeal(path), { code: 'ERESOLVE' }, 'no force') }) @@ -2547,8 +2524,7 @@ t.test('allow ERESOLVE to be forced when not in the source', async t => { }) // in these, b is a peer dep, and peer is some other type - t.test('peer is peer, b is some other type', t => { - t.plan(types.length - 1) + await t.test('peer is peer, b is some other type', async t => { const pj = type => ({ name: '@isaacs/conflicted-peer-optional-from-dev-dep', version: '1.2.3', @@ -2563,17 +2539,16 @@ t.test('allow ERESOLVE to be forced when not in the source', async t => { if (type === 'peerDependencies') { continue } - t.test(type, async t => { + await t.test(type, async t => { const path = t.testdir({ 'package.json': JSON.stringify(pj(type)), }) + createRegistry(t, true) t.matchSnapshot(await printIdeal(path, { force: true }), 'use the force') t.rejects(printIdeal(path), { code: 'ERESOLVE' }, 'no force') }) } }) - - t.end() }) t.test('allow a link dep to satisfy a peer dep', async t => { @@ -2599,6 +2574,7 @@ t.test('allow a link dep to satisfy a peer dep', async t => { }, }) + createRegistry(t, true) const add = ['@isaacs/testing-peer-dep-conflict-chain-vv@2'] // avoids if the link dep is unmet @@ -2680,22 +2656,24 @@ t.test('replace a link with a matching link when the current one is wrong', asyn }, }), }) + createRegistry(t, false) t.matchSnapshot(await printIdeal(path, { workspaces: null, // also test that a null workspaces is ignored. 
}), 'replace incorrect with correct') }) -t.test('cannot do workspaces in global mode', t => { +t.test('cannot do workspaces in global mode', async t => { + createRegistry(t, false) t.throws(() => printIdeal(t.testdir(), { workspaces: ['a', 'b', 'c'], global: true, }), { message: 'Cannot operate on workspaces in global mode' }) - t.end() }) t.test('add packages to workspaces, not root', async t => { const path = resolve(__dirname, '../fixtures/workspaces-not-root') + createRegistry(t, true) const addTree = await buildIdeal(path, { add: ['wrappy@1.0.1'], workspaces: ['a', 'c'], @@ -2724,6 +2702,7 @@ t.test('add one workspace to another', async t => { const path = resolve(__dirname, '../fixtures/workspaces-not-root') const packageA = resolve(path, 'packages/a') + createRegistry(t, false) const addTree = await buildIdeal(path, { add: [packageA], workspaces: ['c'], @@ -2769,23 +2748,25 @@ t.test('workspace error handling', async t => { }, }, }) - t.test('set filter, but no workspaces present', async t => { - const logs = warningTracker() + await t.test('set filter, but no workspaces present', async t => { + const warnings = warningTracker(t) + createRegistry(t, false) await buildIdeal(resolve(path, 'packages/a'), { workspaces: ['a'], }) - t.strictSame(logs(), [[ + t.strictSame(warnings, [[ 'warn', 'workspaces', 'filter set, but no workspaces present', ]], 'got warning') }) - t.test('set filter for workspace that is not present', async t => { - const logs = warningTracker() + await t.test('set filter for workspace that is not present', async t => { + const warnings = warningTracker(t) + createRegistry(t, true) await buildIdeal(path, { workspaces: ['not-here'], }) - t.strictSame(logs(), [[ + t.strictSame(warnings, [[ 'warn', 'workspaces', 'not-here in filter set, but not in workspaces', @@ -2807,6 +2788,7 @@ t.test('avoid dedupe when a dep is bundled', async t => { }), }) + createRegistry(t, true) // do our install, prior to the publishing of b@2.1.0 const startTree = await buildIdeal(path, { // date between publish times of b@2.0.0 and b@2.1.0 @@ -2820,7 +2802,7 @@ t.test('avoid dedupe when a dep is bundled', async t => { // +-- b@2.0 await startTree.meta.save() let b200 - t.test('initial tree state', t => { + await t.test('initial tree state', async t => { const a = startTree.children.get('@isaacs/testing-bundle-dupes-a') const b = startTree.children.get('@isaacs/testing-bundle-dupes-b') const bNested = a.children.get('@isaacs/testing-bundle-dupes-b') @@ -2829,21 +2811,19 @@ t.test('avoid dedupe when a dep is bundled', async t => { t.equal(bNested.version, '2.0.0') // save this to synthetically create the dupe later, so we can fix it b200 = bNested - t.end() }) // Now ensure that adding b@2 will install b@2.1.0 AND // dedupe the nested b@2.0.0 dep. 
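The cases in this file that go on to reify, such as 'KEEP in the root, prune out unnecessary dupe' further down, also mock the bulk advisory endpoint, because reify audits the resulting tree against the strict mock registry. The pattern they follow is roughly this, assuming the no-argument audit() mock answers the bulk advisory POST (/-/npm/v1/security/advisories/bulk) with an empty report:

```js
// Pattern used by the reify-ing tests: reify posts the installed package
// names to the bulk advisory endpoint, so the strict mock registry needs an
// audit mock; with no arguments it presumably replies 200 with no advisories.
const registry = createRegistry(t, true)
registry.audit()
await newArb(path).reify()
```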
const add = ['@isaacs/testing-bundle-dupes-b@2'] const newTree = await buildIdeal(path, { add }) - t.test('did not create dupe', t => { + await t.test('did not create dupe', async t => { const a = newTree.children.get('@isaacs/testing-bundle-dupes-a') const b = newTree.children.get('@isaacs/testing-bundle-dupes-b') const bNested = a.children.get('@isaacs/testing-bundle-dupes-b') t.equal(b.version, '2.1.0') t.equal(a.version, '2.0.0') t.notOk(bNested, 'should not have a nested b') - t.end() }) // now, synthetically create the bug we just verified no longer happens, @@ -2862,7 +2842,7 @@ t.test('avoid dedupe when a dep is bundled', async t => { })) // gut check that we have reproduced the error condition - t.test('gut check that dupe synthetically created', t => { + await t.test('gut check that dupe synthetically created', async t => { const a = newTree.children.get('@isaacs/testing-bundle-dupes-a') const b = newTree.children.get('@isaacs/testing-bundle-dupes-b') const bNested = a.children.get('@isaacs/testing-bundle-dupes-b') @@ -2883,24 +2863,24 @@ t.test('avoid dedupe when a dep is bundled', async t => { t.notOk(bNested, 'should not have a nested b') } - t.test('dedupe to remove dupe', async t => { + await t.test('dedupe to remove dupe', async t => { check(t, await buildIdeal(path, { update: ['@isaacs/testing-bundle-dupes-b'], preferDedupe: true, })) }) - t.test('update b to remove dupe', async t => { + await t.test('update b to remove dupe', async t => { check(t, await buildIdeal(path, { update: ['@isaacs/testing-bundle-dupes-b'], })) }) - t.test('update all to remove dupe', async t => { + await t.test('update all to remove dupe', async t => { check(t, await buildIdeal(path, { update: true })) }) - t.test('reinstall a to remove dupe', async t => { + await t.test('reinstall a to remove dupe', async t => { check(t, await buildIdeal(path, { add: ['@isaacs/testing-bundle-dupes-a@2'], })) @@ -2923,6 +2903,7 @@ t.test('upgrade a partly overlapping peer set', async t => { }, }), }) + createRegistry(t, true) const tree = await buildIdeal(path) await tree.meta.save() t.matchSnapshot(await printIdeal(path, { @@ -2940,9 +2921,10 @@ t.test('fail to upgrade a partly overlapping peer set', async t => { }, }), }) + createRegistry(t, true) const tree = await buildIdeal(path) await tree.meta.save() - t.rejects(printIdeal(path, { + await t.rejects(printIdeal(path, { add: ['@isaacs/testing-peer-dep-conflict-chain-y@3'], }), { code: 'ERESOLVE' }, 'should not be able to upgrade dep') }) @@ -2979,6 +2961,7 @@ t.test('add deps to workspaces', async t => { const path = t.testdir(fixtureDef) t.test('no args', async t => { + createRegistry(t, true) const tree = await buildIdeal(path) t.equal(tree.children.get('mkdirp').version, '1.0.4') t.equal(tree.children.get('a').target.children.get('mkdirp').version, '0.5.5') @@ -2987,6 +2970,7 @@ t.test('add deps to workspaces', async t => { }) t.test('add mkdirp 0.5.0 to b', async t => { + createRegistry(t, true) const tree = await buildIdeal(path, { workspaces: ['b'], add: ['mkdirp@0.5.0'] }) t.equal(tree.children.get('mkdirp').version, '1.0.4') t.equal(tree.children.get('a').target.children.get('mkdirp').version, '0.5.5') @@ -2995,6 +2979,7 @@ t.test('add deps to workspaces', async t => { }) t.test('remove mkdirp from a', async t => { + createRegistry(t, true) const tree = await buildIdeal(path, { workspaces: ['a'], rm: ['mkdirp'] }) t.equal(tree.children.get('mkdirp').version, '1.0.4') t.equal(tree.children.get('a').target.children.get('mkdirp'), undefined) @@ -3003,6 
+2988,7 @@ t.test('add deps to workspaces', async t => { }) t.test('upgrade mkdirp in a, dedupe on root', async t => { + createRegistry(t, true) const tree = await buildIdeal(path, { workspaces: ['a'], add: ['mkdirp@1'] }) t.equal(tree.children.get('mkdirp').version, '1.0.4') t.equal(tree.children.get('a').target.children.get('mkdirp'), undefined) @@ -3013,6 +2999,8 @@ t.test('add deps to workspaces', async t => { t.test('KEEP in the root, prune out unnecessary dupe', async t => { const path = t.testdir(fixtureDef) + const registry = createRegistry(t, true) + registry.audit() const arb = newArb(path) // reify first so that the other mkdirp is present in the tree await arb.reify() @@ -3057,6 +3045,7 @@ t.test('add deps and include workspace-root', async t => { const path = t.testdir(fixtureDef) t.test('no args', async t => { + createRegistry(t, true) const tree = await buildIdeal(path) t.equal(tree.children.get('mkdirp').version, '1.0.4') t.equal(tree.children.get('a').target.children.get('mkdirp').version, '0.5.5') @@ -3066,6 +3055,7 @@ t.test('add deps and include workspace-root', async t => { }) t.test('add mkdirp 0.5.0 to b', async t => { + createRegistry(t, true) const tree = await buildIdeal(path, { workspaces: ['b'], add: ['mkdirp@0.5.0'], includeWorkspaceRoot: true }) t.equal(tree.children.get('mkdirp').version, '0.5.0') t.ok(tree.edgesOut.has('mkdirp')) @@ -3103,6 +3093,7 @@ t.test('inflates old lockfile with hasInstallScript', async t => { }, }, }) + createRegistry(t, true) const tree = await buildIdeal(path, { add: ['esbuild@0.11.10'], @@ -3129,25 +3120,25 @@ t.test('update a global space that contains a link', async t => { once: t.fixture('symlink', '../target'), }, }) + createRegistry(t, true) const tree = await buildIdeal(path, { update: true, global: true }) t.matchSnapshot(printTree(tree)) t.equal(tree.children.get('once').isLink, true) }) -t.test('peer conflicts between peer sets in transitive deps', t => { - t.plan(4) - +t.test('peer conflicts between peer sets in transitive deps', async t => { // caused an infinite loop in https://github.com/npm/arborist/issues/325, // which is the reason for the package name. 
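The subtests that follow, like the lockfile-warning tests earlier in this file, assert directly on the array returned by the reworked warningTracker(t) helper instead of calling a collector function at the end. The fixture itself is not part of this diff; assuming it still listens for proc-log 'log' events on process, it presumably now looks something like:

```js
// Hypothetical shape of the tap-aware warningTracker(t) fixture: it returns a
// live array of warnings and uses the test lifecycle for cleanup, instead of
// returning a function that must be called to collect and unsubscribe.
const warningTracker = (t) => {
  const warnings = []
  const onlog = (...msg) => {
    if (msg[0] === 'warn') {
      warnings.push(msg)
    }
  }
  process.on('log', onlog)
  t.teardown(() => process.removeListener('log', onlog))
  return warnings
}
```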
- t.test('y and j@2 at root, x and j@1 underneath a', async t => { + await t.test('y and j@2 at root, x and j@1 underneath a', async t => { const path = t.testdir({ 'package.json': '{}', }) - const warnings = warningTracker() + createRegistry(t, true) + const warnings = warningTracker(t) const tree = await buildIdeal(path, { add: ['@isaacs/peer-dep-conflict-infinite-loop-a@1'], }) - t.strictSame(warnings(), []) + t.strictSame(warnings, []) const a = tree.children.get('@isaacs/peer-dep-conflict-infinite-loop-a') const j = tree.children.get('@isaacs/peer-dep-conflict-infinite-loop-j') const x = tree.children.get('@isaacs/peer-dep-conflict-infinite-loop-x') @@ -3164,15 +3155,16 @@ t.test('peer conflicts between peer sets in transitive deps', t => { t.notOk(ay) }) - t.test('x and j@1 at root, y and j@2 underneath a', async t => { + await t.test('x and j@1 at root, y and j@2 underneath a', async t => { const path = t.testdir({ 'package.json': '{}', }) - const warnings = warningTracker() + createRegistry(t, true) + const warnings = warningTracker(t) const tree = await buildIdeal(path, { add: ['@isaacs/peer-dep-conflict-infinite-loop-a@2'], }) - t.strictSame(warnings(), []) + t.strictSame(warnings, []) const a = tree.children.get('@isaacs/peer-dep-conflict-infinite-loop-a') const j = tree.children.get('@isaacs/peer-dep-conflict-infinite-loop-j') const x = tree.children.get('@isaacs/peer-dep-conflict-infinite-loop-x') @@ -3189,15 +3181,16 @@ t.test('peer conflicts between peer sets in transitive deps', t => { t.notOk(ax) }) - t.test('get warning, x and j@1 in root, put y and j@3 in a', async t => { + await t.test('get warning, x and j@1 in root, put y and j@3 in a', async t => { const path = t.testdir({ 'package.json': '{}', }) - const warnings = warningTracker() + createRegistry(t, true) + const warnings = warningTracker(t) const tree = await buildIdeal(path, { add: ['@isaacs/peer-dep-conflict-infinite-loop-a@3'], }) - const w = warnings() + const w = warnings t.match(w, [['warn', 'ERESOLVE', 'overriding peer dependency', { code: 'ERESOLVE', }]], 'warning is an ERESOLVE') @@ -3219,15 +3212,16 @@ t.test('peer conflicts between peer sets in transitive deps', t => { t.notOk(ax) }) - t.test('x and j@1 at root, y and j@2 underneath a (no a->j dep)', async t => { + await t.test('x and j@1 at root, y and j@2 underneath a (no a->j dep)', async t => { const path = t.testdir({ 'package.json': '{}', }) - const warnings = warningTracker() + const warnings = warningTracker(t) + createRegistry(t, true) const tree = await buildIdeal(path, { add: ['@isaacs/peer-dep-conflict-infinite-loop-a@4'], }) - t.strictSame(warnings(), [], 'no warnings') + t.strictSame(warnings, [], 'no warnings') const a = tree.children.get('@isaacs/peer-dep-conflict-infinite-loop-a') const j = tree.children.get('@isaacs/peer-dep-conflict-infinite-loop-j') @@ -3244,14 +3238,11 @@ t.test('peer conflicts between peer sets in transitive deps', t => { t.equal(aj.version, '2.0.0') t.notOk(ax) }) - - t.end() }) -t.test('competing peerSets resolve in both root and workspace', t => { +t.test('competing peerSets resolve in both root and workspace', async t => { // The following trees caused an infinite loop in a workspace // https://github.com/npm/cli/issues/3933 - t.plan(2) const rootAndWs = async dependencies => { const fixt = t.testdir({ @@ -3283,7 +3274,7 @@ t.test('competing peerSets resolve in both root and workspace', t => { ] } - t.test('overlapping peerSets dont warn', async t => { + await t.test('overlapping peerSets dont warn', async t 
=> { // This should not cause a warning because replacing `c@2` and `d@2` // with `c@1` and `d@1` is still valid. // @@ -3296,7 +3287,8 @@ t.test('competing peerSets resolve in both root and workspace', t => { // d@2 -> PEER(c@2) // ``` - const warnings = warningTracker() + createRegistry(t, true) + const warnings = warningTracker(t) const [rootTree, wsTree] = await rootAndWs({ '@lukekarrys/workspace-peer-dep-infinite-loop-a': '1', }) @@ -3320,7 +3312,7 @@ t.test('competing peerSets resolve in both root and workspace', t => { t.equal(wsC.version, '1.0.0', 'workspace c version') t.equal(wsD.version, '1.0.0', 'workspace d version') - const [rootWarnings = [], wsWarnings = []] = warnings() + const [rootWarnings = [], wsWarnings = []] = warnings // TODO: these warn for now but shouldnt // https://github.com/npm/arborist/issues/347 t.comment('FIXME') @@ -3337,7 +3329,7 @@ t.test('competing peerSets resolve in both root and workspace', t => { t.matchSnapshot(printTree(wsTree), 'workspace tree') }) - t.test('conflicting peerSets do warn', async t => { + await t.test('conflicting peerSets do warn', async t => { // ``` // project -> (a@2) // a@2 -> (b), PEER(c@2), PEER(d@2) @@ -3347,7 +3339,8 @@ t.test('competing peerSets resolve in both root and workspace', t => { // d@2 -> PEER(c@2) // ``` - const warnings = warningTracker() + createRegistry(t, true) + const warnings = warningTracker(t) const [rootTree, wsTree] = await rootAndWs({ // It's 2.0.1 because I messed up publishing 2.0.0 '@lukekarrys/workspace-peer-dep-infinite-loop-a': '2.0.1', @@ -3387,7 +3380,7 @@ t.test('competing peerSets resolve in both root and workspace', t => { t.equal((wsTargetC || {}).version, undefined, 'workspace target c version') t.equal((wsTargetD || {}).version, undefined, 'workspace target d version') - const [rootWarnings, wsWarnings] = warnings() + const [rootWarnings, wsWarnings] = warnings t.match(rootWarnings, ['warn', 'ERESOLVE', 'overriding peer dependency', { code: 'ERESOLVE', }], 'root warning is an ERESOLVE') @@ -3400,12 +3393,10 @@ t.test('competing peerSets resolve in both root and workspace', t => { t.matchSnapshot(printTree(rootTree), 'root tree') t.matchSnapshot(printTree(wsTree), 'workspace tree') }) - - t.end() }) -t.test('overrides', t => { - t.test('throws when override conflicts with dependencies', async (t) => { +t.test('overrides', async t => { + await t.test('throws when override conflicts with dependencies', async (t) => { const path = t.testdir({ 'package.json': JSON.stringify({ name: 'root', @@ -3418,10 +3409,11 @@ t.test('overrides', t => { }), }) + createRegistry(t, false) await t.rejects(buildIdeal(path), { code: 'EOVERRIDE' }, 'throws EOVERRIDE') }) - t.test('throws when override conflicts with devDependencies', async (t) => { + await t.test('throws when override conflicts with devDependencies', async (t) => { const path = t.testdir({ 'package.json': JSON.stringify({ name: 'root', @@ -3434,10 +3426,11 @@ t.test('overrides', t => { }), }) + createRegistry(t, false) await t.rejects(buildIdeal(path), { code: 'EOVERRIDE' }, 'throws EOVERRIDE') }) - t.test('throws when override conflicts with peerDependencies', async (t) => { + await t.test('throws when override conflicts with peerDependencies', async (t) => { const path = t.testdir({ 'package.json': JSON.stringify({ name: 'root', @@ -3450,20 +3443,11 @@ t.test('overrides', t => { }), }) + createRegistry(t, false) await t.rejects(buildIdeal(path), { code: 'EOVERRIDE' }, 'throws EOVERRIDE') }) t.test('overrides a nested dependency', async (t) => 
{ - generateNocks(t, { - foo: { - versions: ['1.0.0', '1.0.1', '2.0.0'], - dependencies: ['bar'], - }, - bar: { - versions: ['1.0.0', '1.0.1', '2.0.0'], - }, - }) - const path = t.testdir({ 'package.json': JSON.stringify({ name: 'root', @@ -3475,6 +3459,17 @@ t.test('overrides', t => { }, }), }) + const registry = createRegistry(t, false) + const fooPackuments = registry.packuments([ + { version: '1.0.0', dependencies: { bar: '1.0.0' } }, + { version: '1.0.1', dependencies: { bar: '1.0.1' } }, + { version: '2.0.0', dependencies: { bar: '2.0.0' } }, + ], 'foo') + const fooManifest = registry.manifest({ name: 'foo', packuments: fooPackuments }) + const barPackuments = registry.packuments(['1.0.0', '1.0.1', '2.0.0'], 'bar') + const barManifest = registry.manifest({ name: 'bar', packuments: barPackuments }) + await registry.package({ manifest: fooManifest }) + await registry.package({ manifest: barManifest }) const tree = await buildIdeal(path) @@ -3487,15 +3482,17 @@ t.test('overrides', t => { }) t.test('overrides a nested dependency with a more specific override', async (t) => { - generateNocks(t, { - foo: { - versions: ['1.0.0', '1.0.1', '2.0.0'], - dependencies: ['bar'], - }, - bar: { - versions: ['1.0.0', '1.0.1', '2.0.0'], - }, - }) + const registry = createRegistry(t, false) + const fooPackuments = registry.packuments([ + { version: '1.0.0', dependencies: { bar: '1.0.0' } }, + { version: '1.0.1', dependencies: { bar: '1.0.1' } }, + { version: '2.0.0', dependencies: { bar: '2.0.0' } }, + ], 'foo') + const fooManifest = registry.manifest({ name: 'foo', packuments: fooPackuments }) + const barPackuments = registry.packuments(['1.0.0', '1.0.1', '2.0.0'], 'bar') + const barManifest = registry.manifest({ name: 'bar', packuments: barPackuments }) + await registry.package({ manifest: fooManifest }) + await registry.package({ manifest: barManifest }) const path = t.testdir({ 'package.json': JSON.stringify({ @@ -3527,15 +3524,17 @@ t.test('overrides', t => { }) t.test('does not override a nested dependency when parent spec does not match', async (t) => { - generateNocks(t, { - foo: { - versions: ['1.0.0', '1.0.1', '2.0.0'], - dependencies: ['bar'], - }, - bar: { - versions: ['1.0.0', '1.0.1', '2.0.0'], - }, - }) + const registry = createRegistry(t, false) + const fooPackuments = registry.packuments([ + { version: '1.0.0', dependencies: { bar: '1.0.0' } }, + { version: '1.0.1', dependencies: { bar: '1.0.1' } }, + { version: '2.0.0', dependencies: { bar: '2.0.0' } }, + ], 'foo') + const fooManifest = registry.manifest({ name: 'foo', packuments: fooPackuments }) + const barPackuments = registry.packuments(['1.0.0', '1.0.1', '2.0.0'], 'bar') + const barManifest = registry.manifest({ name: 'bar', packuments: barPackuments }) + await registry.package({ manifest: fooManifest }) + await registry.package({ manifest: barManifest, times: 2 }) const path = t.testdir({ 'package.json': JSON.stringify({ @@ -3567,15 +3566,17 @@ t.test('overrides', t => { }) t.test('overrides a nested dependency that also exists as a direct dependency', async (t) => { - generateNocks(t, { - foo: { - versions: ['1.0.0', '1.0.1', '2.0.0'], - dependencies: ['bar'], - }, - bar: { - versions: ['1.0.0', '1.0.1', '2.0.0'], - }, - }) + const registry = createRegistry(t, false) + const fooPackuments = registry.packuments([ + { version: '1.0.0', dependencies: { bar: '1.0.0' } }, + { version: '1.0.1', dependencies: { bar: '1.0.1' } }, + { version: '2.0.0', dependencies: { bar: '2.0.0' } }, + ], 'foo') + const fooManifest = 
registry.manifest({ name: 'foo', packuments: fooPackuments }) + const barPackuments = registry.packuments(['1.0.0', '1.0.1', '2.0.0'], 'bar') + const barManifest = registry.manifest({ name: 'bar', packuments: barPackuments }) + await registry.package({ manifest: fooManifest }) + await registry.package({ manifest: barManifest }) const path = t.testdir({ 'package.json': JSON.stringify({ @@ -3610,15 +3611,17 @@ t.test('overrides', t => { }) t.test('overrides a nested dependency that also exists as a direct dependency without a top level specifier', async (t) => { - generateNocks(t, { - foo: { - versions: ['1.0.0', '1.0.1', '2.0.0'], - dependencies: ['bar'], - }, - bar: { - versions: ['1.0.0', '1.0.1', '2.0.0'], - }, - }) + const registry = createRegistry(t, false) + const fooPackuments = registry.packuments([ + { version: '1.0.0', dependencies: { bar: '1.0.0' } }, + { version: '1.0.1', dependencies: { bar: '1.0.1' } }, + { version: '2.0.0', dependencies: { bar: '2.0.0' } }, + ], 'foo') + const fooManifest = registry.manifest({ name: 'foo', packuments: fooPackuments }) + const barPackuments = registry.packuments(['1.0.0', '1.0.1', '2.0.0'], 'bar') + const barManifest = registry.manifest({ name: 'bar', packuments: barPackuments }) + await registry.package({ manifest: fooManifest }) + await registry.package({ manifest: barManifest }) const path = t.testdir({ 'package.json': JSON.stringify({ @@ -3651,15 +3654,17 @@ t.test('overrides', t => { }) t.test('overrides a nested dependency with a reference to a direct dependency', async (t) => { - generateNocks(t, { - foo: { - versions: ['1.0.0', '1.0.1', '2.0.0'], - dependencies: ['bar'], - }, - bar: { - versions: ['1.0.0', '1.0.1', '2.0.0'], - }, - }) + const registry = createRegistry(t, false) + const fooPackuments = registry.packuments([ + { version: '1.0.0', dependencies: { bar: '1.0.0' } }, + { version: '1.0.1', dependencies: { bar: '1.0.1' } }, + { version: '2.0.0', dependencies: { bar: '2.0.0' } }, + ], 'foo') + const fooManifest = registry.manifest({ name: 'foo', packuments: fooPackuments }) + const barPackuments = registry.packuments(['1.0.0', '1.0.1', '2.0.0'], 'bar') + const barManifest = registry.manifest({ name: 'bar', packuments: barPackuments }) + await registry.package({ manifest: fooManifest }) + await registry.package({ manifest: barManifest }) const path = t.testdir({ 'package.json': JSON.stringify({ @@ -3694,15 +3699,17 @@ t.test('overrides', t => { }) t.test('overrides a nested dependency with a reference to a direct dependency without a top level identifier', async (t) => { - generateNocks(t, { - foo: { - versions: ['1.0.0', '1.0.1', '2.0.0'], - dependencies: ['bar'], - }, - bar: { - versions: ['1.0.0', '1.0.1', '2.0.0'], - }, - }) + const registry = createRegistry(t, false) + const fooPackuments = registry.packuments([ + { version: '1.0.0', dependencies: { bar: '1.0.0' } }, + { version: '1.0.1', dependencies: { bar: '1.0.1' } }, + { version: '2.0.0', dependencies: { bar: '2.0.0' } }, + ], 'foo') + const fooManifest = registry.manifest({ name: 'foo', packuments: fooPackuments }) + const barPackuments = registry.packuments(['1.0.0', '1.0.1', '2.0.0'], 'bar') + const barManifest = registry.manifest({ name: 'bar', packuments: barPackuments }) + await registry.package({ manifest: fooManifest }) + await registry.package({ manifest: barManifest }) const path = t.testdir({ 'package.json': JSON.stringify({ @@ -3735,15 +3742,17 @@ t.test('overrides', t => { }) t.test('overrides a peerDependency', async (t) => { - generateNocks(t, { - foo: { 
- versions: ['1.0.0', '1.0.1', '2.0.0'], - peerDependencies: ['bar'], - }, - bar: { - versions: ['1.0.0', '1.0.1', '2.0.0'], - }, - }) + const registry = createRegistry(t, false) + const fooPackuments = registry.packuments([ + { version: '1.0.0', dependencies: { bar: '1.0.0' } }, + { version: '1.0.1', dependencies: { bar: '1.0.1' } }, + { version: '2.0.0', dependencies: { bar: '2.0.0' } }, + ], 'foo') + const fooManifest = registry.manifest({ name: 'foo', packuments: fooPackuments }) + const barPackuments = registry.packuments(['1.0.0', '1.0.1', '2.0.0'], 'bar') + const barManifest = registry.manifest({ name: 'bar', packuments: barPackuments }) + await registry.package({ manifest: fooManifest }) + await registry.package({ manifest: barManifest }) const path = t.testdir({ 'package.json': JSON.stringify({ @@ -3771,15 +3780,17 @@ t.test('overrides', t => { }) t.test('overrides a peerDependency without top level specifier', async (t) => { - generateNocks(t, { - foo: { - versions: ['1.0.0', '1.0.1', '2.0.0'], - peerDependencies: ['bar'], - }, - bar: { - versions: ['1.0.0', '1.0.1', '2.0.0'], - }, - }) + const registry = createRegistry(t, false) + const fooPackuments = registry.packuments([ + { version: '1.0.0', dependencies: { bar: '1.0.0' } }, + { version: '1.0.1', dependencies: { bar: '1.0.1' } }, + { version: '2.0.0', dependencies: { bar: '2.0.0' } }, + ], 'foo') + const fooManifest = registry.manifest({ name: 'foo', packuments: fooPackuments }) + const barPackuments = registry.packuments(['1.0.0', '1.0.1', '2.0.0'], 'bar') + const barManifest = registry.manifest({ name: 'bar', packuments: barPackuments }) + await registry.package({ manifest: fooManifest }) + await registry.package({ manifest: barManifest }) // this again with no foo const path = t.testdir({ @@ -3806,20 +3817,28 @@ t.test('overrides', t => { }) t.test('can override inside a cyclical dep chain', async (t) => { - generateNocks(t, { - foo: { - versions: ['1.0.0', '1.0.1', '2.0.0'], - dependencies: ['bar'], - }, - bar: { - versions: ['1.0.0', '1.0.1', '2.0.0'], - dependencies: ['baz'], - }, - baz: { - versions: ['1.0.0', '1.0.1', '2.0.0'], - dependencies: ['foo'], - }, - }) + const registry = createRegistry(t, false) + const fooPackuments = registry.packuments([ + { version: '1.0.0', dependencies: { bar: '1.0.0' } }, + { version: '1.0.1', dependencies: { bar: '1.0.1' } }, + { version: '2.0.0', dependencies: { bar: '2.0.0' } }, + ], 'foo') + const fooManifest = registry.manifest({ name: 'foo', packuments: fooPackuments }) + const barPackuments = registry.packuments([ + { version: '1.0.0', dependencies: { baz: '1.0.0' } }, + { version: '1.0.1', dependencies: { baz: '1.0.1' } }, + { version: '2.0.0', dependencies: { baz: '2.0.0' } }, + ], 'bar') + const barManifest = registry.manifest({ name: 'bar', packuments: barPackuments }) + const bazPackuments = registry.packuments([ + { version: '1.0.0', dependencies: { foo: '1.0.0' } }, + { version: '1.0.1', dependencies: { foo: '1.0.1' } }, + { version: '2.0.0', dependencies: { foo: '2.0.0' } }, + ], 'baz') + const bazManifest = registry.manifest({ name: 'baz', packuments: bazPackuments }) + await registry.package({ manifest: fooManifest, times: 2 }) + await registry.package({ manifest: barManifest, times: 2 }) + await registry.package({ manifest: bazManifest, times: 2 }) const path = t.testdir({ 'package.json': JSON.stringify({ @@ -3858,23 +3877,26 @@ t.test('overrides', t => { // this tree creates an ERESOLVE due to a@1 having a peer on b@1 // and d@2 having a peer on b@2, to fix it we 
override the a@1 peer // to be b@2 - generateNocks(t, { - a: { - versions: ['1.0.0'], - peerDependencies: ['b'], - }, - b: { - versions: ['1.0.0', '2.0.0'], - peerDependencies: [{ '2.0.0': 'c' }], - }, - c: { - versions: ['2.0.0'], - }, - d: { - versions: ['2.0.0'], - peerDependencies: ['b'], - }, - }) + const registry = createRegistry(t, false) + const aPackuments = registry.packuments([ + { version: '1.0.0', peerDependencies: { b: '1.0.0' } }, + ], 'a') + const aManifest = registry.manifest({ name: 'a', packuments: aPackuments }) + const bPackuments = registry.packuments([ + { version: '1.0.0', peerDependencies: { c: '2.0.0' } }, + { version: '2.0.0', peerDependencies: { c: '2.0.0' } }, + ], 'b') + const bManifest = registry.manifest({ name: 'b', packuments: bPackuments }) + const cPackuments = registry.packuments(['2.0.0'], 'c') + const cManifest = registry.manifest({ name: 'c', packuments: cPackuments }) + const dPackuments = registry.packuments([ + { version: '2.0.0', peerDependencies: { b: '2.0.0' } }, + ], 'd') + const dManifest = registry.manifest({ name: 'd', packuments: dPackuments }) + await registry.package({ manifest: aManifest, times: 2 }) + await registry.package({ manifest: bManifest, times: 4 }) + await registry.package({ manifest: cManifest, times: 2 }) + await registry.package({ manifest: dManifest, times: 2 }) const pkg = { name: 'root', @@ -3921,11 +3943,10 @@ t.test('overrides', t => { }) t.test('overrides a workspace dependency', async (t) => { - generateNocks(t, { - bar: { - versions: ['1.0.0', '1.0.1', '2.0.0'], - }, - }) + const registry = createRegistry(t, false) + const packuments = registry.packuments(['1.0.0', '1.0.1', '2.0.0'], 'bar') + const manifest = registry.manifest({ name: 'bar', packuments }) + await registry.package({ manifest }) const path = t.testdir({ 'package.json': JSON.stringify({ @@ -3963,8 +3984,6 @@ t.test('overrides', t => { t.equal(fooBarEdge.valid, true) t.equal(fooBarEdge.to.version, '2.0.0') }) - - t.end() }) t.test('store files with a custom indenting', async t => { @@ -3976,6 +3995,7 @@ t.test('store files with a custom indenting', async t => { const path = t.testdir({ 'package.json': tabIndentedPackageJson, }) + createRegistry(t, true) const tree = await buildIdeal(path) t.matchSnapshot(String(tree.meta)) }) @@ -3991,6 +4011,7 @@ t.test('should take devEngines in account', async t => { }, }), }) + createRegistry(t, false) const tree = await buildIdeal(path) t.matchSnapshot(String(tree.meta)) }) diff --git a/workspaces/arborist/test/arborist/deduper.js b/workspaces/arborist/test/arborist/deduper.js index 92d86318fc316..b4e6076293477 100644 --- a/workspaces/arborist/test/arborist/deduper.js +++ b/workspaces/arborist/test/arborist/deduper.js @@ -1,9 +1,7 @@ const t = require('tap') -const Arborist = require('../../lib/arborist/index.js') - -const { start, stop, registry } = require('../fixtures/server.js') -t.before(start) -t.teardown(stop) +const { join } = require('node:path') +const Arborist = require('../..') +const MockRegistry = require('@npmcli/mock-registry') const { normalizePath, @@ -12,15 +10,25 @@ const { const cwd = normalizePath(process.cwd()) t.cleanSnapshot = s => s.split(cwd).join('{CWD}') - .split(registry).join('https://registry.npmjs.org/') const fixture = (t, p) => require('../fixtures/reify-cases/' + p)(t) - const cache = t.testdir() const dedupeTree = (path, opt) => - new Arborist({ registry, path, cache, save: false, ...(opt || {}) }).dedupe(opt) + new Arborist({ path, cache, save: false, ...(opt || {}) 
}).dedupe(opt) + +const createRegistry = t => { + const registry = new MockRegistry({ + strict: true, + tap: t, + registry: 'https://registry.npmjs.org', + }) + registry.mocks({ dir: join(__dirname, '..', 'fixtures') }) + return registry +} t.test('dedupes with actual tree', async t => { + const registry = createRegistry(t) + registry.audit({}) const path = fixture(t, 'dedupe-actual') const tree = await dedupeTree(path) const dep = tree.children.get('@isaacs/dedupe-tests-a') @@ -31,6 +39,8 @@ t.test('dedupes with actual tree', async t => { }) t.test('dedupes with lockfile', async t => { + const registry = createRegistry(t) + registry.audit({}) const path = fixture(t, 'dedupe-lockfile') const tree = await dedupeTree(path) const dep = tree.children.get('@isaacs/dedupe-tests-a') diff --git a/workspaces/arborist/test/arborist/pruner.js b/workspaces/arborist/test/arborist/pruner.js index 7c4bec0c5e2ed..4a6ae4fd484bb 100644 --- a/workspaces/arborist/test/arborist/pruner.js +++ b/workspaces/arborist/test/arborist/pruner.js @@ -1,12 +1,9 @@ -const { resolve } = require('node:path') +const { resolve, join } = require('node:path') +const fs = require('node:fs') const t = require('tap') -const Arborist = require('../../lib/arborist/index.js') - -const { start, stop, registry } = require('../fixtures/server.js') - -t.before(start) -t.teardown(stop) +const Arborist = require('../..') +const MockRegistry = require('@npmcli/mock-registry') const { normalizePath, @@ -15,13 +12,15 @@ const { const cwd = normalizePath(process.cwd()) t.cleanSnapshot = s => s.split(cwd).join('{CWD}') - .split(registry).join('https://registry.npmjs.org/') const fixture = (t, p) => require('../fixtures/reify-cases/' + p)(t) +const registry = new MockRegistry({ + strict: true, + tap: t, + registry: 'https://registry.npmjs.org', +}) -const cache = t.testdir() -const pruneTree = (path, opt) => - new Arborist({ registry, path, cache, ...(opt || {}) }).prune(opt) +const pruneTree = (path, opt) => new Arborist({ path, ...(opt || {}) }).prune(opt) t.test('prune with actual tree', async t => { const path = fixture(t, 'prune-actual') @@ -103,7 +102,7 @@ t.test('prune omit dev with bins', async t => { const devDep = tree.children.get('yes') t.notOk(devDep, 'all listed dev deps pruned from tree') - // should also remove ./bin/* files + // should also remove ./bin[> files const bin = resolve(path, 'node_modules/.bin/yes') if (process.platform === 'win32') { t.throws(() => statSync(bin + '.cmd').isFile(), /ENOENT/, 'should not have shim') @@ -113,8 +112,7 @@ t.test('prune omit dev with bins', async t => { }) t.test('prune workspaces', async t => { - const fs = require('node:fs') - const { join } = require('node:path') + registry.audit({}) const path = t.testdir({ 'package.json': JSON.stringify({ name: 'prune-workspaces', diff --git a/workspaces/arborist/test/arborist/rebuild.js b/workspaces/arborist/test/arborist/rebuild.js index b53b7309a4870..7be5d0059cf5a 100644 --- a/workspaces/arborist/test/arborist/rebuild.js +++ b/workspaces/arborist/test/arborist/rebuild.js @@ -1,30 +1,26 @@ const t = require('tap') const _trashList = Symbol.for('trashList') -const Arborist = require('../../lib/arborist/index.js') +const Arborist = require('../..') const { resolve, dirname } = require('node:path') const os = require('node:os') const fs = require('node:fs') const fixtures = resolve(__dirname, '../fixtures') const relpath = require('../../lib/relpath.js') const localeCompare = require('@isaacs/string-locale-compare')('en') +const MockRegistry = 
require('@npmcli/mock-registry') const fixture = (t, p) => require(`${fixtures}/reify-cases/${p}`)(t) -const isWindows = process.platform === 'win32' -const PORT = 12345 + (+process.env.TAP_CHILD_ID || 0) - -const server = require('node:http').createServer(() => { - throw new Error('rebuild should not hit the registry') +// Spin up a new mock registry with no mocks in strict mode so we ensure that no requests are made +new MockRegistry({ + strict: true, + tap: t, + registry: 'https://registry.npmjs.org', }) -t.before(() => new Promise(res => { - server.listen(PORT, () => { - t.teardown(() => server.close()) - res() - }) -})) -const registry = `http://localhost:${PORT}` -const newArb = opt => new Arborist({ ...opt, registry }) +const isWindows = process.platform === 'win32' + +const newArb = opt => new Arborist(opt) // track the logs that are emitted. returns a function that removes // the listener and provides the list of what it saw. @@ -225,7 +221,7 @@ t.test('run scripts in foreground if foregroundScripts set', async t => { }, }) - const arb = new Arborist({ path, registry, foregroundScripts: true }) + const arb = new Arborist({ path, foregroundScripts: true }) await arb.rebuild() // add a sentinel to make sure we didn't get too many entries, since // t.match() will allow trailing/extra values in the test object. @@ -414,7 +410,7 @@ t.test('rebuild node-gyp dependencies lacking both preinstall and install script }, }), }) - const arb = new Arborist({ path, registry }) + const arb = new Arborist({ path }) await arb.rebuild() t.match(RUNS, [ { @@ -460,7 +456,7 @@ t.test('do not rebuild node-gyp dependencies with gypfile:false', async t => { }, }), }) - const arb = new Arborist({ path, registry }) + const arb = new Arborist({ path }) await arb.rebuild() }) @@ -498,7 +494,7 @@ t.test('do not run lifecycle scripts of linked deps twice', async t => { return require('@npmcli/run-script')(opts) }, }) - const arb = new Arborist({ path, registry }) + const arb = new Arborist({ path }) await arb.rebuild() t.equal(RUNS.length, 1, 'should run postinstall script only once') t.match(RUNS, [ @@ -544,7 +540,7 @@ t.test('workspaces', async t => { return require('@npmcli/run-script')(opts) }, }) - const arb = new Arborist({ path, registry }) + const arb = new Arborist({ path }) await arb.rebuild() t.equal(RUNS.length, 2, 'should run prepare script only once per ws') @@ -593,7 +589,7 @@ t.test('workspaces', async t => { return { code: 0, signal: null } }, }) - const arb = new Arborist({ path, registry, binLinks: false }) + const arb = new Arborist({ path, binLinks: false }) await arb.rebuild() t.equal(RUNS.length, 1, 'should run prepare script only once') @@ -641,7 +637,7 @@ t.test('workspaces', async t => { return { code: 0, signal: null } }, }) - const arb = new Arborist({ path, registry }) + const arb = new Arborist({ path }) await arb.rebuild() t.equal(RUNS.length, 1, 'should run prepare script only once') @@ -804,7 +800,6 @@ t.test('no workspaces', async t => { }) const arb = new Arborist({ path, - registry, workspacesEnabled: false, }) diff --git a/workspaces/arborist/test/arborist/reify.js b/workspaces/arborist/test/arborist/reify.js index 0a7fb416040c0..65148c9993f80 100644 --- a/workspaces/arborist/test/arborist/reify.js +++ b/workspaces/arborist/test/arborist/reify.js @@ -2,10 +2,12 @@ const { join, resolve, basename } = require('node:path') const t = require('tap') const runScript = require('@npmcli/run-script') const localeCompare = require('@isaacs/string-locale-compare')('en') +// TODO mock 
registry (will require several because of differing hosts)
 const tnock = require('../fixtures/tnock')
 const fs = require('node:fs')
 const fsp = require('node:fs/promises')
 const npmFs = require('@npmcli/fs')
+const MockRegistry = require('@npmcli/mock-registry')
 
 let failRm = false
 let failRename = null
@@ -91,7 +93,7 @@ const debugLogTracker = () => {
 }
 const mockDebug = Object.assign(fn => fn(), { log: () => {} })
 
-const Arborist = t.mock('../../lib/index.js', {
+const Arborist = t.mock('../..', {
   ...mocks,
   // need to not mock this one, so we still can swap the process object
   '../../lib/signal-handling.js': require('../../lib/signal-handling.js'),
@@ -102,18 +104,6 @@ const Arborist = t.mock('../..', {
 
 const { Node, Link, Shrinkwrap } = Arborist
 
-const {
-  start,
-  stop,
-  registry,
-  advisoryBulkResponse,
-} = require('../fixtures/server.js')
-
-t.before(start)
-t.teardown(stop)
-
-const cache = t.testdir()
-
 const {
   normalizePath,
   normalizePaths,
@@ -122,64 +112,86 @@ const {
 const cwd = normalizePath(process.cwd())
 t.cleanSnapshot = s => s.split(cwd).join('{CWD}')
-  .split(registry).join('https://registry.npmjs.org/')
 
 const fixture = (t, p) => require('../fixtures/reify-cases/' + p)(t)
 
 const printReified = (path, opt) => reify(path, opt).then(printTree)
+const fixtures = join(__dirname, '..', 'fixtures')
+const createRegistry = (t, mocks) => {
+  const registry = new MockRegistry({
+    strict: true,
+    tap: t,
+    registry: 'https://registry.npmjs.org',
+  })
+  if (mocks) {
+    registry.mocks({ dir: join(__dirname, '..', 'fixtures') })
+  }
+  return registry
+}
 
 const newArb = (opt) => new Arborist({
   audit: false,
-  cache,
-  registry,
+  cache: opt.path,
   // give it a very long timeout so CI doesn't crash as easily
   timeout: 30 * 60 * 1000,
   ...opt,
 })
 
-const reify = (path, opt) => newArb({ path, ...(opt || {}) }).reify(opt)
+const reify = (path, opt = {}) => newArb({ path, ...opt }).reify(opt)
 
 t.test('bundled file dep with same name as other dep', async t => {
+  const registry = createRegistry(t)
+  registry.mocks({ dir: fixtures })
   const tree = await printReified(fixture(t, 'conflict-bundle-file-dep'))
   t.matchSnapshot(tree)
 })
 
-t.test('tarball deps with transitive tarball deps', t =>
-  t.resolveMatchSnapshot(printReified(fixture(t, 'tarball-dependencies'))))
+t.test('tarball deps with transitive tarball deps', async t => {
+  createRegistry(t)
+  await t.resolveMatchSnapshot(printReified(fixture(t, 'tarball-dependencies')))
+})
 
 t.test('update a yarn.lock file', async t => {
   const path = fixture(t, 'yarn-lock-mkdirp')
+  createRegistry(t, true)
   const tree = await reify(path, { add: ['abbrev'] })
   t.matchSnapshot(printTree(tree), 'add abbrev')
   t.matchSnapshot(fs.readFileSync(path + '/yarn.lock', 'utf8'), 'updated yarn lock')
 })
 
-t.test('weirdly broken lockfile without resolved value', t =>
-  t.resolveMatchSnapshot(printReified(fixture(t, 'dep-missing-resolved'))))
+t.test('weirdly broken lockfile without resolved value', async t => {
+  createRegistry(t, true)
+  await t.resolveMatchSnapshot(printReified(fixture(t, 'dep-missing-resolved')))
+})
 
-t.test('testing-peer-deps package', t =>
-  t.resolveMatchSnapshot(printReified(fixture(t, 'testing-peer-deps'))))
+t.test('testing-peer-deps package', async t => {
+  createRegistry(t, true)
+  await t.resolveMatchSnapshot(printReified(fixture(t, 'testing-peer-deps')))
+})
 
-t.test('just the shrinkwrap', t => {
-  const paths = [
-    'cli-750-fresh',
-    'yarn-lock-mkdirp',
-  ]
-  t.plan(paths.length)
-  for (const p of paths) {
-    t.test(p, async t => {
-      const
path = fixture(t, p) - const arb = newArb({ path, audit: true, packageLockOnly: true }) - await arb.reify() - t.ok(arb.auditReport, 'got an audit report') - t.throws(() => fs.statSync(path + '/node_modules'), { code: 'ENOENT' }) - t.matchSnapshot(fs.readFileSync(path + '/package-lock.json', 'utf8')) - }) - } +t.test('just the shrinkwrap', async t => { + await t.test('cli-750-fresh', async t => { + const path = fixture(t, 'cli-750-fresh') + createRegistry(t, false) + const arb = newArb({ path, audit: true, packageLockOnly: true }) + await arb.reify() + t.throws(() => fs.statSync(path + '/node_modules'), { code: 'ENOENT' }) + t.matchSnapshot(fs.readFileSync(path + '/package-lock.json', 'utf8')) + }) + await t.test('yarn-lock-mkdirp', async t => { + const path = fixture(t, 'yarn-lock-mkdirp') + const registry = createRegistry(t, true) + registry.audit({}) + const arb = newArb({ path, audit: true, packageLockOnly: true }) + await arb.reify() + t.throws(() => fs.statSync(path + '/node_modules'), { code: 'ENOENT' }) + t.matchSnapshot(fs.readFileSync(path + '/package-lock.json', 'utf8')) + }) }) t.test('packageLockOnly can add deps', async t => { const path = t.testdir({ 'package.json': '{}' }) + createRegistry(t, true) await reify(path, { add: ['abbrev'], packageLockOnly: true }) t.matchSnapshot(fs.readFileSync(path + '/package.json', 'utf8')) t.matchSnapshot(fs.readFileSync(path + '/package-lock.json', 'utf8')) @@ -190,6 +202,7 @@ t.test('malformed package.json should not be overwitten', async t => { t.plan(2) const path = fixture(t, 'malformed-json') + createRegistry(t, false) const originalContent = fs.readFileSync(path + '/package.json', 'utf8') try { @@ -208,11 +221,13 @@ t.test('malformed package.json should not be overwitten', async t => { t.test('packageLockOnly does not work on globals', t => { const path = t.testdir({ 'package.json': '{}' }) + createRegistry(t, false) return t.rejects(() => reify(path, { global: true, packageLockOnly: true })) }) t.test('omit peer deps', t => { const path = fixture(t, 'testing-peer-deps') + createRegistry(t, true) // in this one we also snapshot the timers, mostly just as a smoke test const timers = {} const finishedTimers = [] @@ -253,20 +268,27 @@ t.test('omit peer deps', t => { }) }) -t.test('testing-peer-deps nested', t => - t.resolveMatchSnapshot(printReified(fixture(t, 'testing-peer-deps-nested')))) +t.test('testing-peer-deps nested', async t => { + createRegistry(t, true) + await t.resolveMatchSnapshot(printReified(fixture(t, 'testing-peer-deps-nested'))) +}) -t.test('a workspace with a duplicated nested conflicted dep', t => - t.resolveMatchSnapshot(printReified(fixture(t, 'workspace4')))) +t.test('a workspace with a duplicated nested conflicted dep', async t => { + createRegistry(t, true) + await t.resolveMatchSnapshot(printReified(fixture(t, 'workspace4'))) +}) -t.test('testing-peer-deps nested with update', t => - t.resolveMatchSnapshot(printReified(fixture(t, 'testing-peer-deps-nested'), { +t.test('testing-peer-deps nested with update', async t => { + createRegistry(t, true) + await t.resolveMatchSnapshot(printReified(fixture(t, 'testing-peer-deps-nested'), { update: { names: ['@isaacs/testing-peer-deps'] }, save: false, - }))) + })) +}) t.test('update a bundling node without updating all of its deps', t => { const path = fixture(t, 'tap-react15-collision-legacy-sw') + createRegistry(t, true) // check that it links the bin const bin = resolve(path, 'node_modules/.bin/tap') @@ -295,6 +317,7 @@ t.test('update a bundling node without updating 
all of its deps', t => { t.test('Bundles rebuilt as long as rebuildBundle not false', async t => { t.test('rebuild the bundle', async t => { const path = fixture(t, 'testing-rebuild-bundle') + createRegistry(t, true) const a = resolve(path, 'node_modules/@isaacs/testing-rebuild-bundle-a') const dir = resolve(a, 'node_modules/@isaacs/testing-rebuild-bundle-b') const file = resolve(dir, 'cwd') @@ -303,6 +326,7 @@ t.test('Bundles rebuilt as long as rebuildBundle not false', async t => { }) t.test('do not rebuild the bundle', async t => { const path = fixture(t, 'testing-rebuild-bundle') + createRegistry(t, true) const a = resolve(path, 'node_modules/@isaacs/testing-rebuild-bundle-a') const dir = resolve(a, 'node_modules/@isaacs/testing-rebuild-bundle-b') const file = resolve(dir, 'cwd') @@ -313,6 +337,7 @@ t.test('Bundles rebuilt as long as rebuildBundle not false', async t => { t.test('transitive deps containing asymmetrical bin no lockfile', t => { const path = fixture(t, 'testing-asymmetrical-bin-no-lock') + createRegistry(t, true) // check that it links the bin const bin = resolve(path, 'node_modules/.bin/b') @@ -327,6 +352,7 @@ t.test('transitive deps containing asymmetrical bin no lockfile', t => { t.test('transitive deps containing asymmetrical bin with lockfile', t => { const path = fixture(t, 'testing-asymmetrical-bin-with-lock') + createRegistry(t, true) // check that it links the bin const bin = resolve(path, 'node_modules/.bin/b') @@ -340,9 +366,9 @@ t.test('transitive deps containing asymmetrical bin with lockfile', t => { t.test('omit optional dep', t => { const path = fixture(t, 'tap-react15-collision-legacy-sw') - const ignoreScripts = true + createRegistry(t, true) - const arb = newArb({ path, ignoreScripts }) + const arb = newArb({ path, ignoreScripts: true }) // eslint-disable-next-line promise/always-return return arb.reify({ omit: ['optional'] }).then(tree => { t.equal(tree.children.get('fsevents'), undefined, 'no fsevents in tree') @@ -359,27 +385,34 @@ t.test('dev, optional, devOptional flags and omissions', t => { const path = 'testing-dev-optional-flags' const omits = [['dev'], ['dev', 'optional'], ['optional']] t.plan(omits.length) - omits.forEach(omit => t.test(omit.join(','), t => - t.resolveMatchSnapshot(printReified(fixture(t, path), { + omits.forEach(omit => t.test(omit.join(','), async t => { + createRegistry(t, true) + await t.resolveMatchSnapshot(printReified(fixture(t, path), { omit, - })))) + })) + })) }) t.test('omits when both dev and optional flags are set', t => { const path = 'testing-dev-optional-flags-2' const omits = [['dev'], ['optional']] t.plan(omits.length) - omits.forEach(omit => t.test(omit.join(','), t => - t.resolveMatchSnapshot(printReified(fixture(t, path), { + omits.forEach(omit => t.test(omit.join(','), async t => { + createRegistry(t, true) + await t.resolveMatchSnapshot(printReified(fixture(t, path), { omit, - })))) + })) + })) }) -t.test('bad shrinkwrap file', t => - t.resolveMatchSnapshot(printReified(fixture(t, 'testing-peer-deps-bad-sw')))) +t.test('bad shrinkwrap file', async t => { + createRegistry(t, true) + await t.resolveMatchSnapshot(printReified(fixture(t, 'testing-peer-deps-bad-sw'))) +}) t.test('multiple bundles at the same level', t => { const path = fixture(t, 'two-bundled-deps') + createRegistry(t, true) const a = newArb({ path }) return a.reify().then(tree => { const root = tree.root @@ -401,23 +434,28 @@ t.test('multiple bundles at the same level', t => { }) }) -t.test('update a node without updating its children', t 
=> - t.resolveMatchSnapshot(printReified(fixture(t, 'once-outdated'), - { update: { names: ['once'] }, save: false }))) +t.test('update a node without updating its children', async t => { + createRegistry(t, true) + await t.resolveMatchSnapshot(printReified(fixture(t, 'once-outdated'), + { update: { names: ['once'] }, save: false })) +}) -t.test('do not add shrinkwrapped deps', t => - t.resolveMatchSnapshot(printReified( - fixture(t, 'shrinkwrapped-dep-no-lock'), { update: true }))) +t.test('do not add shrinkwrapped deps', async t => { + createRegistry(t, true) + await t.resolveMatchSnapshot(printReified( + fixture(t, 'shrinkwrapped-dep-no-lock'), { update: true })) +}) -t.test('do not update shrinkwrapped deps', t => - t.resolveMatchSnapshot(printReified( +t.test('do not update shrinkwrapped deps', async t => { + createRegistry(t, false) + await t.resolveMatchSnapshot(printReified( fixture(t, 'shrinkwrapped-dep-with-lock'), - { update: { names: ['abbrev'] } }))) + { update: { names: ['abbrev'] } })) +}) t.test('tracks changes of shrinkwrapped dep correctly', async t => { - const path = t.testdir({ - 'package.json': '{}', - }) + const path = t.testdir({ 'package.json': '{}' }) + createRegistry(t, true) const install1 = await printReified(path, { add: ['@nlf/shrinkwrapped-dep-updates-a@1.0.0'] }) t.matchSnapshot(install1, 'install added the correct tree') @@ -437,62 +475,78 @@ t.test('tracks changes of shrinkwrapped dep correctly', async t => { t.match(repair, install2, 'tree got repaired') }) -t.test('do not install optional deps with mismatched platform specifications', t => - t.resolveMatchSnapshot(printReified( - fixture(t, 'optional-platform-specification')))) +t.test('do not install optional deps with mismatched platform specifications', async t => { + createRegistry(t, true) + await t.resolveMatchSnapshot(printReified(fixture(t, 'optional-platform-specification'))) +}) -t.test('still do not install optional deps with mismatched platform specifications even when forced', t => - t.resolveMatchSnapshot(printReified( - fixture(t, 'optional-platform-specification'), { force: true }))) +t.test('still do not install optional deps with mismatched platform specifications even when forced', async t => { + createRegistry(t, true) + await t.resolveMatchSnapshot(printReified(fixture(t, 'optional-platform-specification'), { force: true })) +}) -t.test('fail to install deps with mismatched platform specifications', t => - t.rejects(printReified(fixture(t, 'platform-specification')), { code: 'EBADPLATFORM' })) +t.test('fail to install deps with mismatched platform specifications', async t => { + createRegistry(t, true) + await t.rejects(printReified(fixture(t, 'platform-specification')), { code: 'EBADPLATFORM' }) +}) -t.test('success to install optional deps with matched platform specifications with os and cpu and libc options', t => - t.resolveMatchSnapshot(printReified( - fixture(t, 'optional-platform-specification'), { os: 'not-your-os', cpu: 'not-your-cpu', libc: 'not-your-libc' }))) +t.test('success to install optional deps with matched platform specifications with os and cpu and libc options', async t => { + createRegistry(t, true) + await t.resolveMatchSnapshot(printReified( + fixture(t, 'optional-platform-specification'), { os: 'not-your-os', cpu: 'not-your-cpu', libc: 'not-your-libc' })) +}) -t.test('fail to install optional deps with matched os and mismatched cpu with os and cpu and libc options', t => - t.resolveMatchSnapshot(printReified( - fixture(t, 'optional-platform-specification'), 
{ os: 'not-your-os', cpu: 'another-cpu', libc: 'not-your-libc' }))) +t.test('fail to install optional deps with matched os and mismatched cpu with os and cpu and libc options', async t => { + createRegistry(t, true) + await t.resolveMatchSnapshot(printReified( + fixture(t, 'optional-platform-specification'), { os: 'not-your-os', cpu: 'another-cpu', libc: 'not-your-libc' })) +}) -t.test('fail to install optional deps with mismatched os and matched cpu with os and cpu and libc options', t => - t.resolveMatchSnapshot(printReified( - fixture(t, 'optional-platform-specification'), { os: 'another-os', cpu: 'not-your-cpu', libc: 'not-your-libc' }))) +t.test('fail to install optional deps with mismatched os and matched cpu with os and cpu and libc options', async t => { + createRegistry(t, true) + await t.resolveMatchSnapshot(printReified( + fixture(t, 'optional-platform-specification'), { os: 'another-os', cpu: 'not-your-cpu', libc: 'not-your-libc' })) +}) -t.test('fail to install optional deps with matched os and matched cpu and mismatched libc with os and cpu and libc options', t => - t.resolveMatchSnapshot(printReified( - fixture(t, 'optional-platform-specification'), { os: 'another-os', cpu: 'not-your-cpu', libc: 'not-your-libc' }))) +t.test('fail to install optional deps with matched os and matched cpu and mismatched libc with os and cpu and libc options', async t => { + createRegistry(t, true) + await t.resolveMatchSnapshot(printReified( + fixture(t, 'optional-platform-specification'), { os: 'another-os', cpu: 'not-your-cpu', libc: 'not-your-libc' })) +}) t.test('dry run, do not get anything wet', async t => { const cases = [ - 'shrinkwrapped-dep-with-lock-empty', - 'shrinkwrapped-dep-no-lock-empty', - 'link-dep-empty', - 'link-meta-deps-empty', - 'testing-bundledeps-empty', + ['shrinkwrapped-dep-with-lock-empty', false], + ['shrinkwrapped-dep-no-lock-empty', true], + ['link-dep-empty', false], + ['link-meta-deps-empty', true], + ['testing-bundledeps-empty', true], ] t.plan(cases.length) - cases.forEach(c => t.test(c, async t => { - const path = fixture(t, c) - const arb = newArb({ path, dryRun: true }) - t.matchSnapshot(printTree(await arb.reify())) - t.throws(() => fs.statSync(resolve(path, 'node_modules'))) - t.ok(arb.diff) - })) + for (const [c, mocks] of cases) { + await t.test(c, async t => { + const path = fixture(t, c) + createRegistry(t, mocks) + const arb = newArb({ path, dryRun: true }) + t.matchSnapshot(printTree(await arb.reify())) + t.throws(() => fs.statSync(resolve(path, 'node_modules'))) + t.ok(arb.diff) + }) + } }) t.test('reifying with shronk warp dep', t => { const cases = [ - 'shrinkwrapped-dep-with-lock', - 'shrinkwrapped-dep-with-lock-empty', - 'shrinkwrapped-dep-no-lock', - 'shrinkwrapped-dep-no-lock-empty', + ['shrinkwrapped-dep-with-lock', false], + ['shrinkwrapped-dep-with-lock-empty', true], + ['shrinkwrapped-dep-no-lock', true], + ['shrinkwrapped-dep-no-lock-empty', true], ] t.plan(cases.length) - for (const c of cases) { + for (const [c, mocks] of cases) { t.test(c, async t => { const path = fixture(t, c) + createRegistry(t, mocks) const tree = await printReified(path, { // set update so that we don't start the idealTree // with the actualTree, and can see that the deps @@ -506,40 +560,52 @@ t.test('reifying with shronk warp dep', t => { } }) -t.test('link deps already in place', t => - t.resolveMatchSnapshot(printReified(fixture(t, 'link-dep')))) -t.test('create link deps', t => - t.resolveMatchSnapshot(printReified(fixture(t, 'link-dep-empty')))) 
+t.test('link deps already in place', async t => { + createRegistry(t, false) + await t.resolveMatchSnapshot(printReified(fixture(t, 'link-dep'))) +}) + +t.test('create link deps', async t => { + createRegistry(t, false) + await t.resolveMatchSnapshot(printReified(fixture(t, 'link-dep-empty'))) +}) + +t.test('link meta deps, fresh install', async t => { + createRegistry(t, true) + await t.resolveMatchSnapshot(printReified(fixture(t, 'link-meta-deps-empty'))) +}) -t.test('link meta deps, fresh install', t => - t.resolveMatchSnapshot(printReified(fixture(t, 'link-meta-deps-empty')))) -t.test('link meta deps, update', t => - t.resolveMatchSnapshot(printReified(fixture(t, 'link-meta-deps'), { +t.test('link meta deps, update', async t => { + createRegistry(t, true) + await t.resolveMatchSnapshot(printReified(fixture(t, 'link-meta-deps'), { // use legacy nesting so we leave the link nested legacyNesting: true, add: [ '@isaacs/testing-link-dep@2', '@isaacs/testing-link-dev-dep@2', ], - }))) + })) +}) -t.test('update a child of a node with bundled deps', t => { +t.test('update a child of a node with bundled deps', async t => { const path = fixture(t, 'testing-bundledeps-legacy-bundling') - return t.resolveMatchSnapshot(printReified(path, { + createRegistry(t, true) + await t.resolveMatchSnapshot(printReified(path, { update: ['@isaacs/testing-bundledeps-c'], installStrategy: 'nested', })) }) -t.test('update a node without updating a child that has bundle deps', t => { +t.test('update a node without updating a child that has bundle deps', async t => { const path = fixture(t, 'testing-bundledeps-3') - return t.resolveMatchSnapshot(printReified(path, { + createRegistry(t, true) + await t.resolveMatchSnapshot(printReified(path, { update: ['@isaacs/testing-bundledeps-parent'], save: false, })) }) -t.test('optional dependency failures', t => { +t.test('optional dependency failures', async t => { const cases = [ 'optional-dep-tgz-missing', 'optional-metadep-tgz-missing', @@ -552,22 +618,28 @@ t.test('optional dependency failures', t => { 'optional-metadep-postinstall-fail', 'optional-metadep-allinstall-fail', ] - t.plan(cases.length * 2) - let p = [...cases.map(c => t.test(`${c} save=false`, t => - t.resolveMatchSnapshot(printReified(fixture(t, c), - { update: true, save: false }))))] - - // npm update --save - p = [...cases.map(c => t.test(`${c} save=true`, t => - t.resolveMatchSnapshot(printReified(fixture(t, c), - { update: true, save: true }))))] - return p + // t.plan(cases.length * 2) + for (const c of cases) { + await t.test(`${c} save=false`, async t => { + createRegistry(t, true) + await t.resolveMatchSnapshot(printReified(fixture(t, c), + { update: true, save: false })) + }) + // npm update --save + await t.test(`${c} save=true`, async t => { + createRegistry(t, true) + await t.resolveMatchSnapshot(printReified(fixture(t, c), + { update: true, save: true })) + }) + } }) -t.test('failure to fetch prod dep is failure', t => - t.rejects(printReified(fixture(t, 'prod-dep-tgz-missing')))) +t.test('failure to fetch prod dep is failure', async t => { + createRegistry(t, true) + t.rejects(printReified(fixture(t, 'prod-dep-tgz-missing'))) +}) -t.test('failing script means install failure, unless ignoreScripts', t => { +t.test('failing script means install failure, unless ignoreScripts', async t => { const cases = [ 'prod-dep-preinstall-fail', 'prod-dep-install-fail', @@ -575,28 +647,34 @@ t.test('failing script means install failure, unless ignoreScripts', t => { 'prod-dep-allinstall-fail', ] - 
t.plan(cases.length * 2) - - cases.forEach(c => { - t.test(c, t => - t.rejects(printReified(fixture(t, c)))) - t.test(c + ' --ignore-scripts', t => - t.resolveMatchSnapshot(printReified( - fixture(t, c), { ignoreScripts: true }))) - }) + for (const c of cases) { + await t.test(c, async t => { + createRegistry(t, true) + t.rejects(printReified(fixture(t, c))) + }) + await t.test(`${c} --ignore-scripts`, async t => { + createRegistry(t, true) + await t.resolveMatchSnapshot(printReified( + fixture(t, c), { ignoreScripts: true })) + }) + } }) -t.test('link metadep', t => { +t.test('link metadep', async t => { const cases = [ 'cli-750', 'cli-750-fresh', ] - t.plan(cases.length) - cases.forEach(c => t.test(c, t => - t.resolveMatchSnapshot(printReified(fixture(t, c))))) + for (const c of cases) { + createRegistry(t, false) + await t.test(c, async t => { + t.resolveMatchSnapshot(printReified(fixture(t, c))) + }) + } }) t.test('warn on reifying deprecated dependency', t => { + createRegistry(t, true) const a = newArb({ path: fixture(t, 'deprecated-dep'), lockfileVersion: 1, @@ -616,6 +694,7 @@ t.test('warn on reifying deprecated dependency', t => { t.test('rollbacks', { buffered: false }, t => { t.test('fail retiring shallow nodes', t => { const path = fixture(t, 'testing-bundledeps-3') + createRegistry(t, true) const a = newArb({ path, installStrategy: 'nested' }) const expect = new Error('rename fail') const kRenamePath = Symbol.for('renamePath') @@ -643,6 +722,7 @@ t.test('rollbacks', { buffered: false }, t => { t.test('fail retiring nodes because rm fails after eexist', t => { const path = fixture(t, 'testing-bundledeps-3') + createRegistry(t, true) const a = newArb({ path, installStrategy: 'nested' }) const eexist = new Error('rename fail') eexist.code = 'EEXIST' @@ -680,6 +760,7 @@ t.test('rollbacks', { buffered: false }, t => { t.test('fail retiring node, but then rm fixes it', async t => { const path = fixture(t, 'testing-bundledeps-3') + createRegistry(t, true) const a = newArb({ path, installStrategy: 'nested' }) const eexist = new Error('rename fail') eexist.code = 'EEXIST' @@ -708,6 +789,7 @@ t.test('rollbacks', { buffered: false }, t => { t.test('fail creating sparse tree', t => { t.teardown(() => failMkdir = null) const path = fixture(t, 'testing-bundledeps-3') + createRegistry(t, true) const a = newArb({ path, installStrategy: 'nested' }) const kCreateST = Symbol.for('createSparseTree') const createSparseTree = a[kCreateST] @@ -733,6 +815,7 @@ t.test('rollbacks', { buffered: false }, t => { failMkdir = null failRm = null const path = fixture(t, 'testing-bundledeps-3') + createRegistry(t, true) const a = newArb({ path, installStrategy: 'nested' }) const kCreateST = Symbol.for('createSparseTree') @@ -781,6 +864,7 @@ t.test('rollbacks', { buffered: false }, t => { t.test('fail loading shrinkwraps and updating trees', t => { const path = fixture(t, 'shrinkwrapped-dep-no-lock-empty') + createRegistry(t, true) const a = newArb({ path, installStrategy: 'nested' }) const kLoadSW = Symbol.for('loadShrinkwrapsAndUpdateTrees') const loadShrinkwrapsAndUpdateTrees = a[kLoadSW] @@ -807,6 +891,7 @@ t.test('rollbacks', { buffered: false }, t => { t.test('fail loading bundles and updating trees', t => { const path = fixture(t, 'two-bundled-deps') + createRegistry(t, true) const a = newArb({ path, installStrategy: 'nested' }) const kLoadBundles = Symbol.for('loadBundlesAndUpdateTrees') const loadBundlesAndUpdateTrees = a[kLoadBundles] @@ -825,6 +910,7 @@ t.test('rollbacks', { buffered: false }, t => { 
t.test('fail unpacking new modules', t => { const path = fixture(t, 'two-bundled-deps') + createRegistry(t, true) const a = newArb({ path, installStrategy: 'nested' }) const kUnpack = Symbol.for('unpackNewModules') const unpackNewModules = a[kUnpack] @@ -843,6 +929,7 @@ t.test('rollbacks', { buffered: false }, t => { t.test('fail moving back retired unchanged', t => { const path = fixture(t, 'testing-bundledeps-3') + createRegistry(t, true) const a = newArb({ path, installStrategy: 'nested' }) const kMoveback = Symbol.for('moveBackRetiredUnchanged') @@ -871,6 +958,7 @@ t.test('rollbacks', { buffered: false }, t => { t.test('fail removing retired and deleted nodes', t => { const path = fixture(t, 'testing-bundledeps-3') + createRegistry(t, true) const a = newArb({ path, installStrategy: 'nested' }) const kRemove = Symbol.for('removeTrash') const removeRetiredAndDeletedNodes = a[kRemove] @@ -907,6 +995,7 @@ t.test('rollbacks', { buffered: false }, t => { t.test('saving the ideal tree', t => { const kSaveIdealTree = Symbol.for('saveIdealTree') t.test('save=false', async t => { + createRegistry(t, false) // doesn't actually do anything, just for coverage. // if it wasn't an early exit, it'd blow up and throw // an error though. @@ -916,6 +1005,7 @@ t.test('saving the ideal tree', t => { }) t.test('save some stuff', t => { + createRegistry(t, false) const pkg = { bundleDependencies: ['a', 'b', 'c'], dependencies: { @@ -1077,6 +1167,7 @@ t.test('saving the ideal tree', t => { t.test('scoped registries', async t => { const path = t.testdir() + // TODO // this is a very artifical test that is setting a lot of internal things // up so that we assert that the intended behavior of sending right // resolved data for pacote.extract is working as intended, alternatively @@ -1100,8 +1191,7 @@ t.test('scoped registries', async t => { const a = new ArboristMock({ audit: false, path, - cache, - registry, + cache: path, }) const kReify = Symbol.for('reifyNode') a.addTracker('reify') @@ -1131,6 +1221,7 @@ t.test('bin links adding and removing', t => { t.test('global style', t => { const path = t.testdir() + createRegistry(t, true) const nm = resolve(path, 'node_modules') const rbinPart = '.bin/rimraf' + (process.platform === 'win32' ? '.cmd' : '') @@ -1140,7 +1231,7 @@ t.test('global style', t => { .then(() => t.strictSame(fs.readdirSync(nm).sort(), ['.bin', '.package-lock.json', 'rimraf'])) }) -t.test('global', t => { +t.test('global', async t => { const isWindows = process.platform === 'win32' const path = t.testdir({ lib: {} }) @@ -1151,43 +1242,53 @@ t.test('global', t => { const rimrafBin = resolve(binTarget, isWindows ? 'rimraf.cmd' : 'rimraf') const semverBin = resolve(binTarget, isWindows ? 
'semver.cmd' : 'semver') - t.test('add rimraf', t => - reify(lib, { add: ['rimraf@2'], global: true }) + await t.test('add rimraf', async t => { + createRegistry(t, true) + await reify(lib, { add: ['rimraf@2'], global: true }) .then(() => fs.statSync(rimrafBin)) - .then(() => t.strictSame(fs.readdirSync(nm), ['rimraf']))) + .then(() => t.strictSame(fs.readdirSync(nm), ['rimraf'])) + }) - t.test('add semver', t => - reify(lib, { add: ['semver@6.3.0'], global: true }) + await t.test('add semver', async t => { + createRegistry(t, true) + await reify(lib, { add: ['semver@6.3.0'], global: true }) .then(() => fs.statSync(rimrafBin)) .then(() => fs.statSync(semverBin)) - .then(() => t.strictSame(fs.readdirSync(nm).sort(), ['rimraf', 'semver']))) + .then(() => t.strictSame(fs.readdirSync(nm).sort(), ['rimraf', 'semver'])) + }) - t.test('remove semver', t => - reify(lib, { rm: ['semver'], global: true }) + await t.test('remove semver', async t => { + createRegistry(t, false) + await reify(lib, { rm: ['semver'], global: true }) .then(() => fs.statSync(rimrafBin)) .then(() => t.throws(() => fs.statSync(semverBin))) - .then(() => t.strictSame(fs.readdirSync(nm), ['rimraf']))) + .then(() => t.strictSame(fs.readdirSync(nm), ['rimraf'])) + }) - t.test('remove rimraf', t => - reify(lib, { rm: ['rimraf'], global: true }) + await t.test('remove rimraf', async t => { + createRegistry(t, false) + await reify(lib, { rm: ['rimraf'], global: true }) .then(() => t.throws(() => fs.statSync(rimrafBin))) .then(() => t.throws(() => fs.statSync(semverBin))) - .then(() => t.strictSame(fs.readdirSync(nm), []))) + .then(() => t.strictSame(fs.readdirSync(nm), [])) + }) - t.test('add without bin links', t => - reify(lib, { add: ['rimraf@2'], global: true, binLinks: false }) + await t.test('add without bin links', async t => { + createRegistry(t, true) + await reify(lib, { add: ['rimraf@2'], global: true, binLinks: false }) .then(() => t.throws(() => fs.statSync(rimrafBin))) .then(() => t.throws(() => fs.statSync(semverBin))) - .then(() => t.strictSame(fs.readdirSync(nm), ['rimraf']))) - - t.end() + .then(() => t.strictSame(fs.readdirSync(nm), ['rimraf'])) + }) }) -t.test('workspaces', t => { - t.test('reify simple-workspaces', t => - t.resolveMatchSnapshot(printReified(fixture(t, 'workspaces-simple')), 'should reify simple workspaces')) +t.test('workspaces', async t => { + await t.test('reify simple-workspaces', async t => { + createRegistry(t, false) + await t.resolveMatchSnapshot(printReified(fixture(t, 'workspaces-simple')), 'should reify simple workspaces') + }) - t.test('reify workspaces omit dev dependencies', async t => { + await t.test('reify workspaces omit dev dependencies', async t => { const runCase = async (t, opts) => { const path = fixture(t, 'workspaces-conflicting-dev-deps') const rootAjv = resolve(path, 'node_modules/ajv/package.json') @@ -1208,6 +1309,7 @@ t.test('workspaces', t => { } await t.test('default', async t => { + createRegistry(t, false) const { root, a, b } = await runCase(t) t.equal(root.exists(), false, 'root') t.equal(a.exists(), false, 'a') @@ -1215,6 +1317,7 @@ t.test('workspaces', t => { }) await t.test('workspaces only', async t => { + createRegistry(t, false) const { root, a, b } = await runCase(t, { workspaces: ['a'] }) t.equal(root.exists(), false, 'root') t.equal(a.exists(), false, 'a') @@ -1222,6 +1325,7 @@ t.test('workspaces', t => { }) await t.test('workspaces + root', async t => { + createRegistry(t, false) const { root, a, b } = await runCase(t, { workspaces: ['a'], 
includeWorkspaceRoot: true }) t.equal(root.exists(), false, 'root') t.equal(a.exists(), false, 'a') @@ -1229,6 +1333,7 @@ t.test('workspaces', t => { }) await t.test('disable workspaces', async t => { + createRegistry(t, false) const { root, a, b } = await runCase(t, { workspacesEnabled: false }) t.equal(root.exists(), false, 'root') t.equal(a.exists(), true, 'a') @@ -1236,14 +1341,16 @@ t.test('workspaces', t => { }) }) - t.test('reify workspaces lockfile', async t => { + await t.test('reify workspaces lockfile', async t => { const path = fixture(t, 'workspaces-simple') + createRegistry(t, false) await reify(path) t.matchSnapshot(require(path + '/package-lock.json'), 'should lock workspaces config') }) - t.test('reify workspaces bin files', t => { + await t.test('reify workspaces bin files', t => { const path = fixture(t, 'workspaces-link-bin') + createRegistry(t, false) const bins = [ resolve(path, 'node_modules/.bin/a'), @@ -1264,29 +1371,32 @@ t.test('workspaces', t => { .then(checkBin) }) - t.test('reify from an actual loaded workspace env', t => - t.resolveMatchSnapshot( + await t.test('reify from an actual loaded workspace env', async t => { + createRegistry(t, false) + await t.resolveMatchSnapshot( printReified(fixture(t, 'workspaces-non-simplistic')), 'should not clean up entire nm folder for no reason' - )) + ) + }) - t.test('add new workspaces dep', async t => { + await t.test('add new workspaces dep', async t => { + createRegistry(t, true) const path = fixture(t, 'workspaces-add-new-dep') await reify(path) t.matchSnapshot(require(path + '/package-lock.json'), 'should update package-lock with new added dep') }) - t.test('root as-a-workspace', async t => { + await t.test('root as-a-workspace', async t => { + createRegistry(t, true) const path = fixture(t, 'workspaces-root-linked') await reify(path) t.matchSnapshot(require(path + '/package-lock.json'), 'should produce expected package-lock file') }) - - t.end() }) t.test('reify from old package-lock with bins', async t => { const path = fixture(t, 'old-package-lock-with-bins') + createRegistry(t, true) await reify(path, {}) t.matchSnapshot( @@ -1311,6 +1421,7 @@ t.test('fail early if bins will conflict', async t => { semver: 'this is not the linked bin', }, }) + createRegistry(t, true) const arb = newArb({ global: true, path: `${path}/lib`, @@ -1324,6 +1435,7 @@ t.test('fail early if bins will conflict', async t => { t.test('add a dep present in the tree, with v1 shrinkwrap', async t => { // https://github.com/npm/arborist/issues/70 const path = fixture(t, 'old-package-lock') + createRegistry(t, true) await reify(path, { add: ['wrappy'] }) t.matchSnapshot(fs.readFileSync(path + '/package.json', 'utf8')) }) @@ -1337,6 +1449,7 @@ t.test('store files with a custom indenting', async t => { const path = t.testdir({ 'package.json': tabIndentedPackageJson, }) + createRegistry(t, true) await reify(path) t.matchSnapshot(fs.readFileSync(path + '/package.json', 'utf8')) t.matchSnapshot(fs.readFileSync(path + '/package-lock.json', 'utf8')) @@ -1344,6 +1457,7 @@ t.test('store files with a custom indenting', async t => { t.test('do not rewrite valid package.json shorthands', async t => { const path = fixture(t, 'package-json-shorthands') + createRegistry(t, false) await reify(path) const res = require(path + '/package.json') t.equal(res.bin, './index.js', 'should not rewrite bin property') @@ -1352,28 +1466,33 @@ t.test('do not rewrite valid package.json shorthands', async t => { t.test('modules bundled by the root should be installed', async t 
=> { const path = fixture(t, 'root-bundler') + createRegistry(t, false) await reify(path) t.matchSnapshot(fs.readFileSync(path + '/node_modules/child/package.json', 'utf8')) }) t.test('add a new pkg to a prefix that needs to be mkdirpd', async t => { - const path = resolve(t.testdir(), 'missing/path/to/root') - const tree = await reify(path, { add: ['abbrev'] }) - t.matchSnapshot( - printTree(tree), - 'should output a successful tree in mkdirp folder' - ) - t.matchSnapshot( - fs.readFileSync(path + '/package.json', 'utf8'), - 'should place expected package.json file into place' - ) - t.matchSnapshot( - fs.readFileSync(path + '/package-lock.json', 'utf8'), - 'should place expected lockfile file into place' - ) + await t.test('not dry run', async t => { + const path = resolve(t.testdir(), 'missing/path/to/root') + createRegistry(t, true) + const tree = await reify(path, { add: ['abbrev'] }) + t.matchSnapshot( + printTree(tree), + 'should output a successful tree in mkdirp folder' + ) + t.matchSnapshot( + fs.readFileSync(path + '/package.json', 'utf8'), + 'should place expected package.json file into place' + ) + t.matchSnapshot( + fs.readFileSync(path + '/package-lock.json', 'utf8'), + 'should place expected lockfile file into place' + ) + }) - t.test('dry run scenarios', async t => { + await t.test('dry run scenarios', async t => { const path = resolve(t.testdir(), 'missing/path/to/root') + createRegistry(t, false) try { await reify(path, { add: ['abbrev'], dryRun: true }) @@ -1391,6 +1510,7 @@ t.test('add a new pkg to a prefix that needs to be mkdirpd', async t => { t.test('do not delete root-bundled deps in global update', async t => { const path = t.testdir() + createRegistry(t, false) const file = resolve(__dirname, '../fixtures/bundle.tgz') await reify(path, { global: true, add: [`file:${file}`] }) const depPJ = resolve(path, 'node_modules/bundle/node_modules/dep/package.json') @@ -1401,6 +1521,7 @@ t.test('do not delete root-bundled deps in global update', async t => { t.test('do not excessively duplicate bundled metadeps', async t => { const path = fixture(t, 'bundle-metadep-duplication') + createRegistry(t, true) const tree = await reify(path) const hidden = path + '/node_modules/.package-lock.json' t.matchSnapshot(fs.readFileSync(hidden, 'utf8'), 'hidden lockfile') @@ -1411,12 +1532,14 @@ t.test('do not excessively duplicate bundled metadeps', async t => { t.test('do not reify root when root matches duplicated metadep', async t => { const path = fixture(t, 'test-root-matches-metadep') + createRegistry(t, true) await reify(path) fs.statSync(path + '/do-not-delete-this-file') }) t.test('reify properly with all deps when lockfile is ancient', async t => { const path = fixture(t, 'sax') + createRegistry(t, true) const tree = await reify(path) t.matchSnapshot(printTree(tree)) fs.statSync(path + '/node_modules/tap/node_modules/.bin/nyc') @@ -1428,6 +1551,7 @@ t.test('add multiple pkgs in a specific order', async t => { name: 'multiple-pkgs', }), }) + createRegistry(t, true) await reify(path, { add: ['wrappy', 'abbrev'] }) t.matchSnapshot( fs.readFileSync(path + '/package.json', 'utf8'), @@ -1447,6 +1571,7 @@ t.test('save complete lockfile on update-all', async t => { version: '1.0.0', }), }) + createRegistry(t, true) // install the older version first const lock = () => fs.readFileSync(`${path}/package-lock.json`, 'utf8') await reify(path, { add: ['abbrev@1.0.4'] }) @@ -1455,18 +1580,16 @@ t.test('save complete lockfile on update-all', async t => { t.matchSnapshot(lock(), 'should update, 
but not drop root metadata') }) -t.test('save proper lockfile with bins when upgrading lockfile', t => { - const completeOpts = [true, false] - completeOpts.forEach(complete => { - t.test(`complete=${complete}`, async t => { +t.test('save proper lockfile with bins when upgrading lockfile', async t => { + for (const complete of [true, false]) { + await t.test(`complete=${complete}`, async t => { const path = fixture(t, 'semver-installed-with-old-package-lock') + createRegistry(t, true) const lock = () => fs.readFileSync(`${path}/package-lock.json`, 'utf8') await reify(path, { complete }) t.matchSnapshot(lock(), 'should upgrade, with bins in place') }) - }) - - t.end() + } }) t.test('rollback if process is terminated during reify process', async t => { @@ -1528,6 +1651,7 @@ t.test('rollback if process is terminated during reify process', async t => { }) t.test('clean install', async t => { + createRegistry(t, true) const arb = newArb({ path }) // starting from an empty folder, ends up empty await t.rejects(arb.reify(), { @@ -1551,6 +1675,7 @@ t.test('rollback if process is terminated during reify process', async t => { name: 'abbrev', version: '0.0.0', })) + createRegistry(t, true) const arb = newArb({ path }) await t.rejects(arb.reify({ add: ['abbrev@1.1.1'] }), { message: 'process terminated', @@ -1601,8 +1726,9 @@ t.test('warn and correct if damaged data in lockfile', async t => { }), }) - t.test('first pass logs', async t => { + await t.test('first pass logs', async t => { const getLogs = warningTracker() + createRegistry(t, false) await reify(path) t.strictSame(getLogs(), [ [ @@ -1617,8 +1743,9 @@ t.test('warn and correct if damaged data in lockfile', async t => { t.matchSnapshot(fs.readFileSync(path + '/package-lock.json', 'utf8'), '"fixed" lockfile') }) - t.test('second pass just does the right thing', async t => { + await t.test('second pass just does the right thing', async t => { const getLogs = warningTracker() + createRegistry(t, true) await reify(path) t.strictSame(getLogs(), [], 'no warnings this time') t.matchSnapshot(fs.readFileSync(path + '/package-lock.json', 'utf8'), 'actually fixed lockfile') @@ -1627,6 +1754,7 @@ t.test('warn and correct if damaged data in lockfile', async t => { t.test('properly update one module when multiple are present', async t => { const path = t.testdir({}) + createRegistry(t, true) const abbrevpj = resolve(path, 'node_modules/abbrev/package.json') const oncepj = resolve(path, 'node_modules/once/package.json') @@ -1656,6 +1784,7 @@ t.test('saving should not replace file: dep with version', async t => { }, 'package.json': JSON.stringify({}), }) + createRegistry(t, false) process.chdir(path) const pj = resolve(path, 'package.json') @@ -1729,6 +1858,7 @@ t.test('filtered reification in workspaces', async t => { }, }, }) + createRegistry(t, true) const hiddenLock = resolve(path, 'node_modules/.package-lock.json') @@ -1825,6 +1955,7 @@ console.log('1..1') console.log('ok 1 - this is fine') `, }) + createRegistry(t, true) t.matchSnapshot(await printReified(path), 'result') t.resolves(runScript({ @@ -1837,6 +1968,7 @@ console.log('ok 1 - this is fine') t.test('running lifecycle scripts of unchanged link nodes on reify', async t => { const path = fixture(t, 'link-dep-lifecycle-scripts') + createRegistry(t, false) t.matchSnapshot(await printReified(path), 'result') t.ok(fs.lstatSync(resolve(path, 'a/a-prepare')).isFile(), @@ -1852,6 +1984,7 @@ t.test('save-prod, with optional', async t => { optionalDependencies: { abbrev: '*' }, }), }) + createRegistry(t, 
true) const arb = newArb({ path }) await arb.reify({ add: ['abbrev'], saveType: 'prod' }) t.matchSnapshot(fs.readFileSync(path + '/package.json', 'utf8')) @@ -1863,6 +1996,7 @@ t.test('saveBundle', async t => { dependencies: { abbrev: '*' }, }), }) + createRegistry(t, true) const arb = newArb({ path }) await arb.reify({ add: ['abbrev'], saveType: 'prod', saveBundle: true }) t.matchSnapshot(fs.readFileSync(path + '/package.json', 'utf8')) @@ -1875,6 +2009,7 @@ t.test('no saveType: dev w/ compatible peer', async t => { devDependencies: { abbrev: '*' }, }), }) + createRegistry(t, true) const arb = newArb({ path }) await arb.reify({ add: ['abbrev'] }) t.matchSnapshot(fs.readFileSync(path + '/package.json', 'utf8')) @@ -1887,6 +2022,7 @@ t.test('no saveType: dev w/ incompatible peer', async t => { devDependencies: { abbrev: '*' }, }), }) + createRegistry(t, true) const arb = newArb({ path }) await arb.reify({ add: ['abbrev'] }) t.matchSnapshot(fs.readFileSync(path + '/package.json', 'utf8')) @@ -1899,6 +2035,7 @@ t.test('no saveType: dev w/ compatible optional', async t => { devDependencies: { abbrev: '*' }, }), }) + createRegistry(t, true) const arb = newArb({ path }) await arb.reify({ add: ['abbrev'] }) t.matchSnapshot(fs.readFileSync(path + '/package.json', 'utf8')) @@ -1911,6 +2048,7 @@ t.test('no saveType: dev w/ incompatible optional', async t => { devDependencies: { abbrev: '*' }, }), }) + createRegistry(t, true) const arb = newArb({ path }) await arb.reify({ add: ['abbrev'] }) t.matchSnapshot(fs.readFileSync(path + '/package.json', 'utf8')) @@ -1923,6 +2061,7 @@ t.test('no saveType: prod w/ peer', async t => { dependencies: { abbrev: '*' }, }), }) + createRegistry(t, true) const arb = newArb({ path }) await arb.reify({ add: ['abbrev'] }) t.matchSnapshot(fs.readFileSync(path + '/package.json', 'utf8')) @@ -1934,6 +2073,7 @@ t.test('no saveType: peer only', async t => { peerDependencies: { abbrev: '*' }, }), }) + createRegistry(t, true) const arb = newArb({ path }) await arb.reify({ add: ['abbrev'] }) t.matchSnapshot(fs.readFileSync(path + '/package.json', 'utf8')) @@ -1945,6 +2085,7 @@ t.test('no saveType: optional only', async t => { optionalDependencies: { abbrev: '*' }, }), }) + createRegistry(t, true) const arb = newArb({ path }) await arb.reify({ add: ['abbrev'] }) t.matchSnapshot(fs.readFileSync(path + '/package.json', 'utf8')) @@ -1992,6 +2133,7 @@ t.test('do not delete linked targets when link omitted', async t => { const fooindex = resolve(path, 'foo/index.js') t.equal(fs.existsSync(barpj), true, 'bar package.json present') t.equal(fs.existsSync(fooindex), true, 'foo index.js present') + createRegistry(t, false) const tree = await reify(path, { omit: ['dev'] }) t.equal(fs.existsSync(barpj), true, 'bar package.json still present') t.equal(fs.existsSync(fooindex), true, 'foo index.js still present') @@ -2001,6 +2143,7 @@ t.test('do not delete linked targets when link omitted', async t => { t.test('add spec * with semver prefix range gets updated', async t => { const path = t.testdir({ 'package.json': '{}' }) + createRegistry(t, true) const arb = newArb({ path }) await arb.reify({ add: ['latest-is-prerelease'] }) t.matchSnapshot(fs.readFileSync(path + '/package.json', 'utf8')) @@ -2037,6 +2180,7 @@ t.test('add deps to workspaces', async t => { t.test('no args', async t => { const path = t.testdir(fixture) + createRegistry(t, true) const tree = await reify(path) t.equal(tree.children.get('mkdirp').version, '1.0.4') 
t.equal(tree.children.get('a').target.children.get('mkdirp').version, '0.5.5') @@ -2047,6 +2191,7 @@ t.test('add deps to workspaces', async t => { t.test('add mkdirp 0.5.0 to b', async t => { const path = t.testdir(fixture) + createRegistry(t, true) await reify(path) const tree = await reify(path, { workspaces: ['b'], add: ['mkdirp@0.5.0'] }) t.equal(tree.children.get('mkdirp').version, '1.0.4') @@ -2059,6 +2204,7 @@ t.test('add deps to workspaces', async t => { t.test('remove mkdirp from a', async t => { const path = t.testdir(fixture) + createRegistry(t, true) await reify(path) const tree = await reify(path, { workspaces: ['a'], rm: ['mkdirp'] }) t.equal(tree.children.get('mkdirp').version, '1.0.4') @@ -2072,6 +2218,7 @@ t.test('add deps to workspaces', async t => { t.test('upgrade mkdirp in a, dedupe on root', async t => { const path = t.testdir(fixture) + createRegistry(t, true) await reify(path) const tree = await reify(path, { workspaces: ['a'], add: ['mkdirp@1'] }) t.equal(tree.children.get('mkdirp').version, '1.0.4') @@ -2085,6 +2232,7 @@ t.test('add deps to workspaces', async t => { t.test('add mkdirp 0.5.0 to b, empty start', async t => { const path = t.testdir(fixture) + createRegistry(t, true) const tree = await reify(path, { workspaces: ['b'], add: ['mkdirp@0.5.0'] }) t.equal(tree.children.get('mkdirp'), undefined) t.equal(tree.children.get('a'), undefined, 'did not even link workspace "a"') @@ -2096,6 +2244,7 @@ t.test('add deps to workspaces', async t => { t.test('remove mkdirp from a, empty start', async t => { const path = t.testdir(fixture) + createRegistry(t, true) const tree = await reify(path, { workspaces: ['a'], rm: ['mkdirp'] }) t.equal(tree.children.get('mkdirp'), undefined) t.equal(tree.children.get('a').target.children.get('mkdirp'), undefined) @@ -2108,6 +2257,7 @@ t.test('add deps to workspaces', async t => { t.test('upgrade mkdirp in a, dedupe on root, empty start', async t => { const path = t.testdir(fixture) + createRegistry(t, true) const tree = await reify(path, { workspaces: ['a'], add: ['mkdirp@1'] }) t.equal(tree.children.get('mkdirp').version, '1.0.4') t.equal(tree.children.get('a').target.children.get('mkdirp'), undefined) @@ -2120,6 +2270,7 @@ t.test('add deps to workspaces', async t => { t.test('add a to root', async t => { const path = t.testdir(fixture) + createRegistry(t, true) await reify(path) const tree = await reify(path, { add: ['a'], lockfileVersion: 3 }) t.matchSnapshot(printTree(tree), 'returned tree') @@ -2129,8 +2280,6 @@ t.test('add deps to workspaces', async t => { }) t.test('reify audit only workspace deps when reifying workspace', async t => { - const auditFile = resolve(__dirname, '../fixtures/audit-nyc-mkdirp/advisory-bulk.json') - t.teardown(advisoryBulkResponse(auditFile)) const path = t.testdir({ 'package.json': JSON.stringify({ workspaces: ['packages/*'], @@ -2156,6 +2305,8 @@ t.test('reify audit only workspace deps when reifying workspace', async t => { }, }, }) + const registry = createRegistry(t, true) + registry.audit({ results: require('../fixtures/audit-nyc-mkdirp/advisory-bulk.json') }) const arb = newArb({ path, audit: true, workspaces: ['a'] }) const tree = await arb.reify() const report = arb.auditReport.toJSON() @@ -2227,6 +2378,7 @@ t.test('update a dep when the lockfile is lying about it', async t => { }, }) + createRegistry(t, true) const tree = await reify(path) const abbrev = tree.children.get('abbrev') t.equal(abbrev.version, '1.1.1') @@ -2238,6 +2390,7 @@ t.test('shrinkwrap which lacks metadata updates deps', 
async t => { 'package.json': '{}', }) + createRegistry(t, true) const first = await reify(path, { add: ['@isaacs/testing-shrinkwrap-abbrev@1.2.0'], }) @@ -2296,6 +2449,7 @@ t.test('move aside symlink clutter', async t => { return this[kReifyPackages]() } + createRegistry(t, true) const tree = await printReified(path) const st = fs.lstatSync(path + '/node_modules/abbrev') t.equal(st.isSymbolicLink(), false) @@ -2335,6 +2489,7 @@ t.test('collide case-variant dep names', async t => { }, }) + createRegistry(t, true) const tree = await printReified(path) const st = fs.lstatSync(path + '/node_modules/abbrev') t.equal(st.isSymbolicLink(), false) @@ -2375,6 +2530,7 @@ t.test('node_modules may not be a symlink', async t => { }, }), }) + createRegistry(t, true) const warnings = warningTracker() const tree = await printReified(path) t.matchSnapshot(tree) @@ -2399,6 +2555,7 @@ t.test('never unpack into anything other than a real directory', async t => { }), }, }) + createRegistry(t, true) const arb = newArb({ path }) const logs = debugLogTracker() const wrappy = resolve(path, 'node_modules/once/node_modules/wrappy') @@ -2426,6 +2583,7 @@ t.test('adding an unresolvable optional dep is OK', async t => { }, }), }) + createRegistry(t, true) const tree = await reify(path, { add: ['abbrev'] }) t.strictSame([...tree.children.values()], [], 'nothing actually added') t.matchSnapshot(printTree(tree)) @@ -2460,6 +2618,7 @@ t.test('includeWorkspaceRoot in addition to workspace', async t => { }, }, }) + createRegistry(t, true) const tree = await reify(path, { includeWorkspaceRoot: true, workspaces: ['a'] }) t.matchSnapshot(printTree(tree)) t.equal(tree.inventory.query('name', 'semver').size, 0) @@ -2496,6 +2655,7 @@ t.test('no workspace', async t => { }, }, }) + createRegistry(t, true) const tree = await reify(path, { workspacesEnabled: false, workspaces: ['a', 'b'] }) t.matchSnapshot(printTree(tree)) t.equal(tree.inventory.query('name', 'semver').size, 0) @@ -2531,6 +2691,7 @@ t.test('add local dep with existing dev + peer/optional', async t => { t.teardown(() => process.chdir(cwd)) process.chdir(project) + createRegistry(t, false) const tree = await reify(project, { add: ['../dep'] }) t.matchSnapshot(printTree(tree), 'tree') @@ -2554,6 +2715,7 @@ t.test('runs dependencies script if tree changes', async (t) => { }), }) + createRegistry(t, true) await reify(path) for (const script of ['predependencies', 'dependencies', 'postdependencies']) { @@ -2591,6 +2753,7 @@ t.test('save package.json on update', t => { t.test('should save many deps in multiple package.json when using save=true', async t => { const path = fixture(t, 'workspaces-need-update') + createRegistry(t, true) await reify(path, { update: true, save: true }) t.same( @@ -2612,6 +2775,7 @@ t.test('save package.json on update', t => { t.test('should not save many deps in multiple package.json when using save=false', async t => { const path = fixture(t, 'workspaces-need-update') + createRegistry(t, true) await reify(path, { update: true, save: false }) t.same( @@ -2637,6 +2801,7 @@ t.test('save package.json on update', t => { t.test('should not save any with save=false and package-lock=false', async t => { const path = fixture(t, 'workspaces-need-update') + createRegistry(t, true) await reify(path, { update: true, save: false, packageLock: false }) t.same( @@ -2662,6 +2827,7 @@ t.test('save package.json on update', t => { t.test('should update named dep across multiple package.json using save=true', async t => { const path = fixture(t, 
'workspaces-need-update') + createRegistry(t, true) await reify(path, { update: ['abbrev'], save: true }) t.same( @@ -2690,6 +2856,7 @@ t.test('save package.json on update', t => { t.test('should update single named dep across multiple package.json using save=true', async t => { const path = fixture(t, 'workspaces-need-update') + createRegistry(t, true) await reify(path, { update: ['once'], save: true }) t.same( @@ -2718,6 +2885,7 @@ t.test('save package.json on update', t => { t.test('should preserve exact ranges', async t => { const path = fixture(t, 'update-exact-version') + createRegistry(t, true) await reify(path, { update: true, save: true }) t.equal( @@ -2736,6 +2904,7 @@ t.test('save package.json on update', t => { }), }) + createRegistry(t, true) await reify(path, { update: true, save: true }) t.equal( @@ -2764,6 +2933,7 @@ t.test('save package.json on update', t => { }, }) + createRegistry(t, false) await t.resolves(reify(resolve(path, 'one'), { update: true, save: true, workspaces: [] })) t.equal( @@ -2799,6 +2969,7 @@ t.test('installLinks', (t) => { }, }) + createRegistry(t, false) await reify(resolve(path, 'a'), { installLinks: true }) const installedB = fs.lstatSync(resolve(path, 'a/node_modules/b')) @@ -2828,6 +2999,7 @@ t.test('installLinks', (t) => { }, }) + createRegistry(t, false) await reify(resolve(path, 'a'), { installLinks: false }) const installedB = fs.lstatSync(resolve(path, 'a/node_modules/b')) @@ -2857,6 +3029,7 @@ t.test('installLinks', (t) => { }, }) + createRegistry(t, false) await reify(resolve(path, 'a'), { installLinks: false, save: true }) const firstB = fs.lstatSync(resolve(path, 'a/node_modules/b')) @@ -2891,6 +3064,7 @@ t.test('installLinks', (t) => { }, }) + createRegistry(t, false) await reify(resolve(path, 'a'), { installLinks: true }) const firstB = fs.lstatSync(resolve(path, 'a/node_modules/b')) @@ -2928,6 +3102,7 @@ t.test('installLinks', (t) => { }, }) + createRegistry(t, true) await reify(resolve(path, 'a'), { installLinks: true }) const installedB = fs.lstatSync(resolve(path, 'a/node_modules/b')) @@ -2973,6 +3148,7 @@ t.test('installLinks', (t) => { }, }) + createRegistry(t, true) await reify(resolve(path, 'a'), { installLinks: true }) const installedB = fs.lstatSync(resolve(path, 'a/node_modules/b')) diff --git a/workspaces/arborist/test/audit-report.js b/workspaces/arborist/test/audit-report.js index 6f4bcf8858f35..f546793688490 100644 --- a/workspaces/arborist/test/audit-report.js +++ b/workspaces/arborist/test/audit-report.js @@ -1,27 +1,14 @@ const t = require('tap') const localeCompare = require('@isaacs/string-locale-compare')('en') const AuditReport = require('../lib/audit-report.js') -const { auditToBulk } = AuditReport const Node = require('../lib/node.js') const Arborist = require('../') +const MockRegistry = require('@npmcli/mock-registry') -const { - start, - stop, - registry, - auditResponse, - failAudit, - advisoryBulkResponse, -} = require('./fixtures/server.js') -t.before(start) -t.teardown(stop) - -const { resolve } = require('node:path') +const { join, resolve } = require('node:path') const fixtures = resolve(__dirname, 'fixtures') -const cache = t.testdir() -const newArb = (path, opts = {}) => - new Arborist({ path, registry, cache, ...opts }) +const newArb = (path, opts = {}) => new Arborist({ path, ...opts }) const sortReport = report => { const entries = Object.entries(report.vulnerabilities) @@ -41,11 +28,22 @@ const sortReport = report => { }, {}) } +const createRegistry = (t) => { + const registry = new MockRegistry({ 
+ strict: true, + tap: t, + registry: 'https://registry.npmjs.org', + }) + return registry +} + t.test('all severity levels', async t => { const path = resolve(fixtures, 'audit-all-severities') - const auditFile = resolve(path, 'audit.json') - t.teardown(auditResponse(auditFile)) - const arb = newArb(path) + const registry = createRegistry(t) + registry.audit({ convert: true, results: require(resolve(path, 'audit.json')) }) + registry.mocks({ dir: join(__dirname, 'fixtures') }) + const cache = t.testdir() + const arb = newArb(path, { cache }) const tree = await arb.loadVirtual() const report = await AuditReport.load(tree, arb.options) @@ -55,9 +53,11 @@ t.test('all severity levels', async t => { t.test('vulnerable dep not from registry', async t => { const path = resolve(fixtures, 'minimist-git-dep') - const auditFile = resolve(path, 'audit.json') - t.teardown(auditResponse(auditFile)) - const arb = newArb(path) + const registry = createRegistry(t) + registry.audit({ convert: true, results: require(resolve(path, 'audit.json')) }) + registry.mocks({ dir: join(__dirname, 'fixtures') }) + const cache = t.testdir() + const arb = newArb(path, { cache }) const tree = await arb.loadVirtual() const report = await AuditReport.load(tree, arb.options) @@ -69,9 +69,11 @@ t.test('vulnerable dep not from registry', async t => { t.test('metavuln where dep is not a registry dep', async t => { const path = resolve(fixtures, 'minimist-git-metadep') - const auditFile = resolve(path, 'audit.json') - t.teardown(auditResponse(auditFile)) - const arb = newArb(path) + const registry = createRegistry(t) + registry.audit({ convert: true, results: require(resolve(path, 'audit.json')) }) + registry.mocks({ dir: join(__dirname, 'fixtures') }) + const cache = t.testdir() + const arb = newArb(path, { cache }) const tree = await arb.loadVirtual() const report = await AuditReport.load(tree, arb.options) @@ -84,9 +86,11 @@ t.test('metavuln where dep is not a registry dep', async t => { t.test('metavuln where a dep is not on the registry at all', async t => { const path = resolve(fixtures, 'audit-missing-packument') - const auditFile = resolve(path, 'audit.json') - t.teardown(auditResponse(auditFile)) - const arb = newArb(path) + const registry = createRegistry(t) + registry.audit({ convert: true, results: require(resolve(path, 'audit.json')) }) + registry.mocks({ dir: join(__dirname, 'fixtures') }) + const cache = t.testdir() + const arb = newArb(path, { cache }) const tree = await arb.loadVirtual() const report = await AuditReport.load(tree, arb.options) @@ -97,10 +101,11 @@ t.test('metavuln where a dep is not on the registry at all', async t => { t.test('get advisory about node not in tree', async t => { // this should never happen, but if it does, we're prepared for it const path = resolve(fixtures, 'audit-nyc-mkdirp') - const auditFile = resolve(path, 'audit.json') - t.teardown(auditResponse(auditFile)) - - const arb = newArb(path) + const registry = createRegistry(t) + registry.audit({ convert: true, results: require(resolve(path, 'audit.json')) }) + registry.mocks({ dir: join(__dirname, 'fixtures') }) + const cache = t.testdir() + const arb = newArb(path, { cache }) const tree = await arb.loadVirtual() tree.children.get('mkdirp').parent = null @@ -116,8 +121,6 @@ t.test('get advisory about node not in tree', async t => { } } const report = await AuditReport.load(tree, arb.options) - // just a gut-check that the registry server is actually doing stuff - t.match(report.report, auditToBulk(require(auditFile)), 'got 
expected response') t.equal(report.topVulns.size, 0, 'one top node found vulnerable') t.equal(report.size, 0, 'no vulns that were relevant') t.equal(report.get('nyc'), undefined) @@ -126,9 +129,11 @@ t.test('get advisory about node not in tree', async t => { t.test('unfixable, but not a semver major forced fix', async t => { const path = resolve(fixtures, 'mkdirp-pinned') - const auditFile = resolve(fixtures, 'audit-nyc-mkdirp/audit.json') - t.teardown(auditResponse(auditFile)) - const arb = newArb(path) + const registry = createRegistry(t) + registry.audit({ convert: true, results: require(resolve(fixtures, 'audit-nyc-mkdirp', 'audit.json')) }) + registry.mocks({ dir: join(__dirname, 'fixtures') }) + const cache = t.testdir() + const arb = newArb(path, { cache }) const tree = await arb.loadVirtual() const report = await AuditReport.load(tree, arb.options) @@ -139,41 +144,16 @@ t.test('unfixable, but not a semver major forced fix', async t => { t.test('audit outdated nyc and mkdirp', async t => { const path = resolve(fixtures, 'audit-nyc-mkdirp') - const auditFile = resolve(path, 'audit.json') - t.teardown(auditResponse(auditFile)) - - const arb = newArb(path) - - const tree = await arb.loadVirtual() - const report = await AuditReport.load(tree, arb.options) - t.matchSnapshot(JSON.stringify(report, 0, 2), 'json version') - - // just a gut-check that the registry server is actually doing stuff - t.match(report.report, auditToBulk(require(auditFile)), 'got expected response') - - t.throws(() => report.set('foo', 'bar'), { - message: 'do not call AuditReport.set() directly', - }) - - t.equal(report.topVulns.size, 1, 'one top node found vulnerable') - t.equal(report.get('nyc').simpleRange, '6.2.0-alpha - 13.1.0') - t.equal(report.get('mkdirp').simpleRange, '0.4.1 - 0.5.1') -}) - -t.test('audit outdated nyc and mkdirp with newer endpoint', async t => { - const path = resolve(fixtures, 'audit-nyc-mkdirp') - const auditFile = resolve(path, 'advisory-bulk.json') - t.teardown(advisoryBulkResponse(auditFile)) - - const arb = newArb(path) + const registry = createRegistry(t) + registry.audit({ results: require(resolve(path, 'advisory-bulk.json')) }) + registry.mocks({ dir: join(__dirname, 'fixtures') }) + const cache = t.testdir() + const arb = newArb(path, { cache }) const tree = await arb.loadVirtual() const report = await AuditReport.load(tree, arb.options) t.matchSnapshot(JSON.stringify(report, 0, 2), 'json version') - // just a gut-check that the registry server is actually doing stuff - t.match(report.report, require(auditFile), 'got expected response') - t.throws(() => report.set('foo', 'bar'), { message: 'do not call AuditReport.set() directly', }) @@ -185,18 +165,17 @@ t.test('audit outdated nyc and mkdirp with newer endpoint', async t => { t.test('audit outdated nyc and mkdirp with before: option', async t => { const path = resolve(fixtures, 'audit-nyc-mkdirp') - const auditFile = resolve(path, 'audit.json') - t.teardown(auditResponse(auditFile)) + const registry = createRegistry(t) + registry.audit({ results: require(resolve(path, 'advisory-bulk.json')) }) + registry.mocks({ dir: join(__dirname, 'fixtures') }) - const arb = newArb(path, { before: new Date('2020-01-01') }) + const cache = t.testdir() + const arb = newArb(path, { before: new Date('2020-01-01'), cache }) const tree = await arb.loadVirtual() const report = await AuditReport.load(tree, arb.options) t.matchSnapshot(JSON.stringify(report, 0, 2), 'json version') - // just a gut-check that the registry server is actually doing 
stuff - t.match(report.report, auditToBulk(require(auditFile)), 'got expected response') - t.equal(report.topVulns.size, 1, 'one top node found vulnerable') t.equal(report.get('nyc').simpleRange, '6.2.0-alpha - 13.1.0') t.equal(report.get('mkdirp').simpleRange, '0.4.1 - 0.5.1') @@ -204,7 +183,8 @@ t.test('audit outdated nyc and mkdirp with before: option', async t => { t.test('audit returns an error', async t => { const path = resolve(fixtures, 'audit-nyc-mkdirp') - t.teardown(failAudit()) + const registry = createRegistry(t) + registry.audit({ responseCode: 503, results: 'no audit for you' }) const logs = [] const onlog = (...msg) => { @@ -216,7 +196,8 @@ t.test('audit returns an error', async t => { process.on('log', onlog) t.teardown(() => process.removeListener('log', onlog)) - const arb = newArb(path) + const cache = t.testdir() + const arb = newArb(path, { cache }) const tree = await arb.loadVirtual() const report = await AuditReport.load(tree, arb.options) @@ -228,11 +209,6 @@ t.test('audit returns an error', async t => { 'audit', 'bulk request', ], - [ - 'silly', - 'audit', - 'bulk request failed', - ], [ 'verbose', 'audit error', @@ -245,13 +221,15 @@ t.test('audit returns an error', async t => { t.test('audit disabled by config', async t => { const path = resolve(fixtures, 'audit-nyc-mkdirp') + createRegistry(t) const logs = [] const onlog = (...msg) => logs.push(msg) process.on('log', onlog) t.teardown(() => process.removeListener('log', onlog)) - const arb = newArb(path, { audit: false }) + const cache = t.testdir() + const arb = newArb(path, { audit: false, cache }) const tree = await arb.loadVirtual() const report = await AuditReport.load(tree, arb.options) @@ -263,13 +241,15 @@ t.test('audit disabled by config', async t => { t.test('audit disabled by offline mode', async t => { const path = resolve(fixtures, 'audit-nyc-mkdirp') + createRegistry(t) const logs = [] const onlog = (...msg) => logs.push(msg) process.on('log', onlog) t.teardown(() => process.removeListener('log', onlog)) - const arb = newArb(path, { offline: true }) + const cache = t.testdir() + const arb = newArb(path, { offline: true, cache }) const tree = await arb.loadVirtual() const report = await AuditReport.load(tree, arb.options) @@ -281,9 +261,11 @@ t.test('audit disabled by offline mode', async t => { t.test('one vulnerability', async t => { const path = resolve(fixtures, 'audit-one-vuln') - const auditFile = resolve(path, 'audit.json') - t.teardown(auditResponse(auditFile)) - const arb = newArb(path) + const registry = createRegistry(t) + registry.audit({ convert: true, results: require(resolve(path, 'audit.json')) }) + registry.mocks({ dir: join(__dirname, 'fixtures') }) + const cache = t.testdir() + const arb = newArb(path, { cache }) const tree = await arb.loadVirtual() const report = await AuditReport.load(tree, arb.options) @@ -294,9 +276,11 @@ t.test('one vulnerability', async t => { t.test('a dep vuln that also has its own advisory against it', async t => { const path = resolve(fixtures, 'audit-dep-vuln-with-own-advisory') - const auditFile = resolve(path, 'audit.json') - t.teardown(auditResponse(auditFile)) - const arb = newArb(path) + const registry = createRegistry(t) + registry.audit({ convert: true, results: require(resolve(path, 'audit.json')) }) + registry.mocks({ dir: join(__dirname, 'fixtures') }) + const cache = t.testdir() + const arb = newArb(path, { cache }) const tree = await arb.loadVirtual() const report = await AuditReport.load(tree, arb.options) @@ -311,33 +295,17 @@ t.test('get 
default opts when loaded without opts', async t => { t.strictSame(ar.options, {}) }) -t.test('error on audit response with no advisories object', async t => { - const dir = t.testdir({ - 'audit.json': JSON.stringify({ no: 'advisories', at: 'all' }), - }) - const path = resolve(fixtures, 'audit-nyc-mkdirp') - const auditFile = resolve(dir, 'audit.json') - t.teardown(auditResponse(auditFile)) - - const arb = newArb(path) - - const tree = await arb.loadVirtual() - const report = await AuditReport.load(tree, arb.options) - t.match(report.error, { - message: 'Invalid advisory report', - body: JSON.stringify({ no: 'advisories', at: 'all' }), - }) -}) - t.test('audit report with a lying v5 lockfile', async t => { // npm v5 stored the resolved dependency version in the `requires` // set, rather than the spec that is actually required. As a result, // a dep may _appear_ to be a metavuln, but when we scan the // packument, it turns out that it matches no nodes, and gets deleted. const path = resolve(fixtures, 'eslintme') - const arb = newArb(path) - const auditFile = resolve(path, 'audit.json') - t.teardown(advisoryBulkResponse(auditFile)) + const registry = createRegistry(t) + registry.audit({ results: require(resolve(path, 'audit.json')) }) + registry.mocks({ dir: join(__dirname, 'fixtures') }) + const cache = t.testdir() + const arb = newArb(path, { cache }) const tree = await arb.loadVirtual() const report = await AuditReport.load(tree, arb.options) // also try to delete something that just very much is not present @@ -348,9 +316,6 @@ t.test('audit report with a lying v5 lockfile', async t => { t.test('omit options', async t => { const path = resolve(fixtures, 'audit-omit') - const quick = resolve(path, 'quick.json') - // quick response doesn't change for omit args - t.teardown(auditResponse(quick)) const omits = [ [], ['dev'], @@ -360,30 +325,30 @@ t.test('omit options', async t => { ['peer', 'dev'], ['peer', 'dev', 'optional'], // empty ] - const arb = newArb(path) - const tree = await arb.loadVirtual() - for (const omit of omits) { - t.test(`omit=[${omit.join(',')}]`, async t => { + await t.test(`omit=[${omit.join(',')}]`, async t => { + const cache = t.testdir() + const arb = newArb(path, { cache }) + const tree = await arb.loadVirtual() + const registry = createRegistry(t) const s = omit.map(o => `-omit${o}`).join('') - const bulk = resolve(path, `bulk${s}.json`) - const rmBulk = advisoryBulkResponse(bulk) - const r1 = (await AuditReport.load(tree, { ...arb.options, omit })) - .toJSON() + const bulkResults = require(resolve(path, `bulk${s}.json`)) + if (Object.keys(bulkResults).length) { /// peer, dev, optional is empty + registry.audit({ convert: false, results: bulkResults }) + registry.mocks({ dir: join(__dirname, 'fixtures') }) + } + const r1 = (await AuditReport.load(tree, { ...arb.options, omit })).toJSON() sortReport(r1) - rmBulk() t.matchSnapshot(r1, 'bulk') - const r2 = (await AuditReport.load(tree, { ...arb.options, omit })) - .toJSON() + const r2 = (await AuditReport.load(tree, { ...arb.options, omit })).toJSON() sortReport(r2) t.strictSame(r1, r2, 'same results') - t.end() }) } - t.end() }) t.test('audit when tree is empty', async t => { + createRegistry(t) const tree = new Node({ path: '/path/to/tree', }) @@ -393,6 +358,7 @@ t.test('audit when tree is empty', async t => { }) t.test('audit when bulk report doenst have anything in it', async t => { + createRegistry(t) const tree = new Node({ path: '/path/to/tree', pkg: { @@ -409,47 +375,11 @@ t.test('audit when bulk report doenst 
have anything in it', async t => { t.strictSame(report, null) }) -t.test('default severity=high, vulnerable_versions=*', async t => { - const audit = { - actions: [], - advisories: { - 755: { - findings: [ - { - version: '1.2.3', - paths: [ - 'something', - ], - }, - ], - id: 755, - title: 'no severity or vulnerable versions', - module_name: 'something', - overview: 'should default severity=high, vulnerable_versions=*', - recommendation: "don't use this thing", - url: 'https://npmjs.com/advisories/755', - }, - }, - muted: [], - metadata: { - vulnerabilities: {}, - dependencies: 1, - devDependencies: 0, - optionalDependencies: 0, - totalDependencies: 1, - }, - runId: 'just-some-unique-identifier', - } - - const bulk = auditToBulk(audit) - t.match(bulk, { something: [{ severity: 'high', vulnerable_versions: '*' }] }) - t.end() -}) - t.test('audit supports alias deps', async t => { const path = resolve(fixtures, 'audit-nyc-mkdirp') - const auditFile = resolve(path, 'advisory-bulk.json') - t.teardown(advisoryBulkResponse(auditFile)) + const registry = createRegistry(t) + registry.audit({ results: require(resolve(path, 'advisory-bulk.json')) }) + registry.mocks({ dir: join(__dirname, 'fixtures') }) const tree = new Node({ path, pkg: { @@ -484,17 +414,18 @@ t.test('audit supports alias deps', async t => { ], }) - const report = await AuditReport.load(tree, { path, registry, cache }) + const report = await AuditReport.load(tree, { path }) t.matchSnapshot(JSON.stringify(report, 0, 2), 'json version') t.equal(report.get('mkdirp').simpleRange, '0.4.1 - 0.5.1') }) t.test('audit with filterSet limiting to only mkdirp and minimist', async t => { const path = resolve(fixtures, 'audit-nyc-mkdirp') - const auditFile = resolve(path, 'advisory-bulk.json') - t.teardown(advisoryBulkResponse(auditFile)) - - const arb = newArb(path) + const registry = createRegistry(t) + registry.audit({ results: require(resolve(path, 'advisory-bulk.json')) }) + registry.mocks({ dir: join(__dirname, 'fixtures') }) + const cache = t.testdir() + const arb = newArb(path, { cache }) const tree = await arb.loadVirtual() const filterSet = new Set([ diff --git a/workspaces/arborist/test/fixtures/server.js b/workspaces/arborist/test/fixtures/server.js deleted file mode 100644 index 61962b16c09be..0000000000000 --- a/workspaces/arborist/test/fixtures/server.js +++ /dev/null @@ -1,250 +0,0 @@ -const { join, dirname } = require('node:path') -const { promisify } = require('node:util') -const fs = require('node:fs/promises') -const http = require('node:http') -const https = require('node:https') -const zlib = require('node:zlib') -const mrm = require('minify-registry-metadata') - -const gzip = promisify(zlib.gzip) -const unzip = promisify(zlib.unzip) -const mkdirp = (p) => fs.mkdir(p, { recursive: true }) -const exists = (p) => fs.stat(p).then(() => true).catch(() => false) -const writeJson = (p, d) => fs.writeFile(p, JSON.stringify(d, null, 2) + '\n') - -const PORT = 12345 + (+process.env.TAP_CHILD_ID || 0) -const doProxy = process.env.ARBORIST_TEST_PROXY - -const missing = /\/@isaacs(\/|%2[fF])(this-does-not-exist-at-all|testing-missing-tgz\/-\/)/ -const corgiDoc = 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*' - -let advisoryBulkResponse = null -let failAdvisoryBulk = false -let auditResponse = null -let failAudit = false - -const proxyRegistry = async (req, res) => { - const upstream = await new Promise((resolve) => { - https.get({ - host: 'registry.npmjs.org', - path: req.url, - headers: { - ...req.headers, 
- accept: '*', - 'accept-encoding': 'identity', - host: 'registry.npmjs.org', - connection: 'close', - 'if-none-match': '', - }, - }).on('response', response => { - const { statusCode, headers } = response - const data = [] - const error = statusCode >= 300 || statusCode < 200 - const contentEncoding = headers['content-encoding'] - const contentType = headers['content-type'] - - console.error('[PROXY] START', `${req.url}: ${statusCode} ${contentType}`) - if (error) { - console.error('[PROXY] UPSTREAM ERROR', statusCode) - } - - res.statusCode = statusCode - res.setHeader('content-encoding', contentEncoding) - res.setHeader('content-type', contentType) - - response.on('end', () => { - console.error('[PROXY] END', req.url) - resolve({ error, data: Buffer.concat(data), contentType }) - }) - response.on('data', c => data.push(c)) - }).end() - }) - - return upstream -} - -const bulkAdvisoriesRoute = async (req, res) => { - if (failAdvisoryBulk) { - res.statusCode = 503 - return res.end('no advisory bulk for you') - } - - const file = advisoryBulkResponse - - if (!file) { - if (auditResponse && !failAudit) { - // simulate what the registry does when quick audits are allowed, - // but advisory bulk requests are not - res.statusCode = 405 - return res.end(JSON.stringify({ - code: 'MethodNotAllowedError', - message: 'POST is not allowed', - })) - } - res.statusCode = 404 - return res.end('not found') - } - - if (doProxy && !(await exists(file))) { - const { error, data } = await proxyRegistry(req, res) - - if (!error) { - await mkdirp(dirname(file)) - const obj = await unzip(data).then(r => JSON.parse(r.toString())) - await writeJson(file, obj) - } - - return res.end(data) - } - - res.setHeader('content-encoding', 'gzip') - const data = await fs.readFile(file).then(r => gzip(r)) - return res.end(data) -} - -const quickAuditRoute = async (req, res) => { - if (failAudit) { - res.statusCode = 503 - return res.end('no audit for you') - } - - const file = auditResponse - - if (!file) { - res.statusCode = 404 - return res.end('not found') - } - - if (doProxy && !(await exists(file))) { - const { error, data } = await proxyRegistry(req, res) - - if (!error) { - await mkdirp(dirname(file)) - const unzipped = await unzip(data).then(r => r.toString()) - const obj = JSON.parse(unzipped) - await writeJson(file, obj) - } - - return res.end(data) - } - - res.setHeader('content-encoding', 'gzip') - const data = await fs.readFile(file).then(r => gzip(r)) - return res.end(data) -} - -const onRequest = async (req, res) => { - res.setHeader('connection', 'close') - - if (req.url === '/-/npm/v1/security/advisories/bulk') { - return await bulkAdvisoriesRoute(req, res) - } - - if (req.url === '/-/npm/v1/security/audits/quick') { - return await quickAuditRoute(req, res) - } - - const f = join(__dirname, 'registry-mocks', 'content', join('/', req.url.replace(/@/g, '').replace(/%2f/gi, '/'))) - const isCorgi = req.headers.accept.includes('application/vnd.npm.install-v1+json') - - let file = f - if (isCorgi && await exists(`${f}.min.json`)) { - file += '.min.json' - } else if (await exists(`${f}.json`)) { - file += '.json' - } else if (await exists(`${f}/index.json`)) { - file += 'index.json' - } - - const { body, error } = await fs.readFile(file) - .then((body) => ({ body })) - .catch((error) => ({ error })) - - if (error) { - // testing things going missing from the registry somehow - if (missing.test(req.url)) { - res.statusCode = 404 - return res.end('{"error": "not found"}') - } - - if (doProxy) { - const { error: 
proxyError, contentType, data } = await proxyRegistry(req, res) - - if (!proxyError) { - await mkdirp(dirname(f)) - - if (contentType.includes('application/json')) { - const file = `${f}.json` - const obj = JSON.parse(data.toString()) - await Promise.all([ - writeJson(file, obj), - writeJson(file.replace(/\.json$/, '.min.json'), mrm(obj)), - ]) - } else { - await fs.writeFile(f, data) - } - } - - return res.end(data) - } - - res.statusCode = error.code === 'ENOENT' ? 404 : 500 - if (res.method === 'GET') { - console.error(error) - } - res.setHeader('content-type', 'text/plain') - return res.end(error.stack) - } - - res.setHeader('content-length', body.length) - res.setHeader('content-type', /\.min\.json$/.test(file) ? corgiDoc - : /\.json$/.test(file) ? 'application/json' - : 'application/octet-stream') - return res.end(body) -} - -const startServer = async () => { - const server = exports.server = http.createServer(onRequest) - await new Promise(res => server.listen(PORT, res)) -} - -exports.auditResponse = value => { - if (auditResponse && auditResponse !== value) { - throw new Error('setting audit response, but already set\n' + - '(did you forget to call the returned function on teardown?)') - } - auditResponse = value - return () => auditResponse = null -} - -exports.failAudit = () => { - failAudit = true - return () => failAudit = false -} - -exports.advisoryBulkResponse = value => { - if (advisoryBulkResponse && advisoryBulkResponse !== value) { - throw new Error('setting advisory bulk response, but already set\n' + - '(did you forget to call the returned function on teardown?)') - } - advisoryBulkResponse = value - return () => advisoryBulkResponse = null -} - -exports.failAdvisoryBulk = () => { - failAdvisoryBulk = true - return () => failAdvisoryBulk = false -} - -exports.registry = `http://localhost:${PORT}/` - -exports.start = startServer -exports.stop = () => exports.server.close() - -if (require.main === module) { - startServer() - .then(() => console.log(`Mock registry live at:\n${exports.registry}\nPress ^D to close gracefully.`)) - .catch(console.error) - process.openStdin() - process.stdin.on('end', () => exports.stop()) -}
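
Note for anyone migrating other arborist tests off the deleted test/fixtures/server.js: a minimal sketch of the replacement setup, assuming only the @npmcli/mock-registry calls already used in the tests above (the audit() and mocks() helpers); the 'audit-one-vuln' fixture name and paths are illustrative, not part of this patch.

    // sketch: mocking the bulk advisory endpoint without test/fixtures/server.js
    const t = require('tap')
    const { join, resolve } = require('node:path')
    const MockRegistry = require('@npmcli/mock-registry')
    const AuditReport = require('../lib/audit-report.js')
    const Arborist = require('../')

    t.test('one vulnerability (sketch)', async t => {
      const path = resolve(__dirname, 'fixtures', 'audit-one-vuln')
      const registry = new MockRegistry({
        strict: true,
        tap: t,
        registry: 'https://registry.npmjs.org',
      })
      // convert: true translates a legacy quick-audit fixture into bulk format
      registry.audit({ convert: true, results: require(resolve(path, 'audit.json')) })
      // packuments and tarballs are served from the shared fixtures directory
      registry.mocks({ dir: join(__dirname, 'fixtures') })

      const arb = new Arborist({ path, cache: t.testdir() })
      const tree = await arb.loadVirtual()
      const report = await AuditReport.load(tree, arb.options)
      t.matchSnapshot(JSON.stringify(report, 0, 2), 'json version')
    })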
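
The failure path also changes shape now that the fallback to the old endpoint is gone: a 503 from the bulk advisory route is the end of the road, as exercised by the 'audit returns an error' test above. Reusing registry, arb, and tree from the sketch before; the exact assertions are illustrative.

    // sketch: a failed bulk request is no longer retried against /audits/quick
    registry.audit({ responseCode: 503, results: 'no audit for you' })
    const report = await AuditReport.load(tree, arb.options)
    t.equal(report.size, 0, 'no vulnerabilities recorded when the request fails')
    t.ok(report.error, 'the 503 is surfaced on the report (illustrative check)')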