diff --git a/.github/workflows/nx-cloud-workflow-validations.yml b/.github/workflows/nx-cloud-workflow-validations.yml index ab3a4b7..3a2e84c 100644 --- a/.github/workflows/nx-cloud-workflow-validations.yml +++ b/.github/workflows/nx-cloud-workflow-validations.yml @@ -11,8 +11,9 @@ jobs: runs-on: ubuntu-latest steps: - - name: Checkout code - uses: actions/checkout@v4 + - uses: actions/checkout@v4 + with: + fetch-depth: 0 - name: Setup Node uses: actions/setup-node@v4 with: @@ -24,3 +25,9 @@ jobs: run: | chmod +x ./scripts/check-launch-templates.sh ./scripts/check-launch-templates.sh + - uses: nrwl/nx-set-shas@v4 + - run: git branch --track main origin/main + if: ${{ github.event_name == 'pull_request' }} + - name: Run checks + run: | + yarn nx affected -t test diff --git a/workflow-steps/cache/README.md b/workflow-steps/cache/README.md index 6746976..be2e7f0 100644 --- a/workflow-steps/cache/README.md +++ b/workflow-steps/cache/README.md @@ -49,6 +49,13 @@ paths: | All above locations will be cached and subsequently restored. +If you have multiple `node_modules` folders, you can also pass in a glob path: + +```yaml +paths: | + packages/*/node_modules +``` + ## `base-branch` For security reasons, this step will only write cache entries **for the current branch only**. 
This isolation is diff --git a/workflow-steps/cache/hashing-utils.ts b/workflow-steps/cache/hashing-utils.ts index 84e0cdf..c9aaa25 100644 --- a/workflow-steps/cache/hashing-utils.ts +++ b/workflow-steps/cache/hashing-utils.ts @@ -1,5 +1,3 @@ -import * as string_decoder from 'string_decoder'; - const fs = require('fs'); const crypto = require('crypto'); import { glob } from 'glob'; @@ -49,3 +47,36 @@ export function hashKey(key: string): string { function hash(input: string) { return crypto.createHash('sha256').update(input).digest('hex'); } + +export function buildCachePaths(inputPaths: string) { + const directories = Array.from( + new Set( + inputPaths + .split('\n') + .filter((p) => p) + .reduce( + (allPaths, currPath) => [...allPaths, ...expandPath(currPath)], + [], + ), + ), + ); + + const invalidDirectories = directories.filter((dir) => !fs.existsSync(dir)); + if (invalidDirectories.length > 0) { + console.warn( + `The following paths are not valid or empty:\n${invalidDirectories.join( + '\n', + )}`, + ); + } + return directories; +} + +function expandPath(pattern: string): string[] { + const globExpandedPaths = glob.sync(pattern); + if (globExpandedPaths.length == 0) { + // it's probably not a valid path so we return it so it can be included in the error above + return [pattern]; + } + return globExpandedPaths; +} diff --git a/workflow-steps/cache/hasing-utils.spec.ts b/workflow-steps/cache/hasing-utils.spec.ts index 662c550..4ed2e44 100644 --- a/workflow-steps/cache/hasing-utils.spec.ts +++ b/workflow-steps/cache/hasing-utils.spec.ts @@ -1,8 +1,18 @@ -import { hashKey } from './hashing-utils'; +import { buildCachePaths, hashKey } from './hashing-utils'; import * as path from 'path'; describe('hashing-utils', () => { const testDir = path.join(__dirname, 'test-files'); + let consoleWarnSpy; + + beforeEach(() => { + consoleWarnSpy = jest.spyOn(console, 'warn').mockImplementation(() => {}); + }); + + afterEach(() => { + consoleWarnSpy.mockRestore(); + }); + 
it('should hash a single file', () => { expect(hashKey(`${testDir}/yarn.lock`)).toEqual( '6ef0d64a2ac614adc8dac86db67244e77cdad3253a65fb8e2b7c11ed4cbb466a', @@ -27,4 +37,72 @@ describe('hashing-utils', () => { '226f813c92638665c8daa0920cfb83e5f33732f8843042deee348032a1abee40', ); }); + + it('should validate simple dirs', () => { + let input = `test-files/packages/app1`; + let expected = [`test-files/packages/app1`]; + expect(buildCachePaths(input)).toEqual(expected); + + input = `test-files/packages/app2\ntest-files/packages/app3\n\n`; + expected = [`test-files/packages/app2`, `test-files/packages/app3`]; + expect(buildCachePaths(input)).toEqual(expected); + + input = `test-files/yarn.lock\ntest-files/packages/app3\n\n`; + expected = [`test-files/yarn.lock`, `test-files/packages/app3`]; + expect(buildCachePaths(input)).toEqual(expected); + + input = `test-files/yarn.lock\ntest-files/main.js`; + expected = [`test-files/yarn.lock`, `test-files/main.js`]; + expect(buildCachePaths(input)).toEqual(expected); + }); + + it('should warn when invalid dirs are specified', () => { + const input = `test-files/packages/app6`; + buildCachePaths(input); + expect(consoleWarnSpy).toHaveBeenCalledWith( + 'The following paths are not valid or empty:\n' + + 'test-files/packages/app6', + ); + }); + + it('should warn when invalid dirs are specified', () => { + const input = `test-files/packages/app2\ntest-files/packages/app7\n\n`; + buildCachePaths(input); + expect(consoleWarnSpy).toHaveBeenCalledWith( + 'The following paths are not valid or empty:\n' + + 'test-files/packages/app7', + ); + }); + + it('should support glob paths', () => { + let input = `test-files/packages/*/mock_node_modules`; + let expected = [ + `test-files/packages/app1/mock_node_modules`, + `test-files/packages/app2/mock_node_modules`, + `test-files/packages/app3/mock_node_modules`, + ]; + expect(buildCachePaths(input)).toEqual(expected); + + // it should filter out duplicates + input = 
`test-files/packages/app1/mock_node_modules\ntest-files/packages/*/mock_node_modules\ntest-files/packages`; + expected = [ + `test-files/packages/app1/mock_node_modules`, + `test-files/packages/app2/mock_node_modules`, + `test-files/packages/app3/mock_node_modules`, + `test-files/packages`, + ]; + expect(buildCachePaths(input)).toEqual(expected); + }); + + it('should filter out duplicates', () => { + const input = `test-files/packages/app1\ntest-files/packages/app1/mock_node_modules\ntest-files/packages/*/mock_node_modules\ntest-files/packages`; + const expected = [ + `test-files/packages/app1`, + `test-files/packages/app1/mock_node_modules`, + `test-files/packages/app2/mock_node_modules`, + `test-files/packages/app3/mock_node_modules`, + `test-files/packages`, + ]; + expect(buildCachePaths(input)).toEqual(expected); + }); }); diff --git a/workflow-steps/cache/main.ts b/workflow-steps/cache/main.ts index 9e3eb34..360ca1a 100644 --- a/workflow-steps/cache/main.ts +++ b/workflow-steps/cache/main.ts @@ -6,7 +6,9 @@ import { hashKey } from './hashing-utils'; import { appendFileSync, writeFileSync, existsSync } from 'fs'; const input_key = process.env.NX_CLOUD_INPUT_key; -const input_base_branch = process.env.NX_CLOUD_INPUT_base_branch; +const baseBranch = + process.env.NX_CLOUD_INPUT_base_branch || + process.env['NX_CLOUD_INPUT_base-branch']; export const cacheClient = createPromiseClient( CacheService, @@ -16,7 +18,6 @@ export const cacheClient = createPromiseClient( ); const currentBranch = process.env.NX_BRANCH; -const baseBranch = input_base_branch; if (!input_key) { throw new Error('No cache restore key provided.'); diff --git a/workflow-steps/cache/output/main.js b/workflow-steps/cache/output/main.js index 9d24c66..4214d58 100644 --- a/workflow-steps/cache/output/main.js +++ b/workflow-steps/cache/output/main.js @@ -5991,7 +5991,7 @@ function hash(input) { // main.ts var import_fs = require("fs"); var input_key = process.env.NX_CLOUD_INPUT_key; -var 
input_base_branch = process.env.NX_CLOUD_INPUT_base_branch; +var baseBranch = process.env.NX_CLOUD_INPUT_base_branch || process.env["NX_CLOUD_INPUT_base-branch"]; var cacheClient = createPromiseClient( CacheService, createConnectTransport({ @@ -5999,7 +5999,6 @@ var cacheClient = createPromiseClient( }) ); var currentBranch = process.env.NX_BRANCH; -var baseBranch = input_base_branch; if (!input_key) { throw new Error("No cache restore key provided."); } diff --git a/workflow-steps/cache/output/post.js b/workflow-steps/cache/output/post.js index 1a072fc..38fca59 100644 --- a/workflow-steps/cache/output/post.js +++ b/workflow-steps/cache/output/post.js @@ -5975,6 +5975,33 @@ function hashKey(key) { function hash(input) { return crypto.createHash("sha256").update(input).digest("hex"); } +function buildCachePaths(inputPaths) { + const directories = Array.from( + new Set( + inputPaths.split("\n").filter((p) => p).reduce( + (allPaths, currPath) => [...allPaths, ...expandPath(currPath)], + [] + ) + ) + ); + const invalidDirectories = directories.filter((dir) => !fs.existsSync(dir)); + if (invalidDirectories.length > 0) { + console.warn( + `The following paths are not valid or empty: +${invalidDirectories.join( + "\n" + )}` + ); + } + return directories; +} +function expandPath(pattern) { + const globExpandedPaths = import_glob.glob.sync(pattern); + if (globExpandedPaths.length == 0) { + return [pattern]; + } + return globExpandedPaths; +} // post.ts var input_key = process.env.NX_CLOUD_INPUT_key; @@ -5993,7 +6020,8 @@ if (!!cacheWasHit) { throw new Error("No cache restore key or paths provided."); } const key = hashKey(input_key); - const paths = input_paths.split("\n").filter((p) => p); + const paths = buildCachePaths(input_paths); + console.log("Storing the following directories..\n" + paths.join("\n")); cacheClient.storeV2( new StoreRequest({ key, diff --git a/workflow-steps/cache/post.ts b/workflow-steps/cache/post.ts index 676255c..ad0bf63 100644 --- 
a/workflow-steps/cache/post.ts +++ b/workflow-steps/cache/post.ts @@ -2,7 +2,7 @@ import { createPromiseClient } from '@bufbuild/connect'; import { createConnectTransport } from '@bufbuild/connect-web'; import { CacheService } from './generated_protos/cache_connect'; import { StoreRequest, StoreResponse } from './generated_protos/cache_pb'; -import { hashKey } from './hashing-utils'; +import { buildCachePaths, hashKey } from './hashing-utils'; const input_key = process.env.NX_CLOUD_INPUT_key; const input_paths = process.env.NX_CLOUD_INPUT_paths; @@ -25,7 +25,9 @@ if (!!cacheWasHit) { throw new Error('No cache restore key or paths provided.'); } const key = hashKey(input_key); - const paths = input_paths.split('\n').filter((p) => p); + const paths = buildCachePaths(input_paths); + + console.log('Storing the following directories..\n' + paths.join('\n')); cacheClient .storeV2( diff --git a/workflow-steps/cache/test-files/packages/app1/mock_node_modules/one.txt b/workflow-steps/cache/test-files/packages/app1/mock_node_modules/one.txt new file mode 100644 index 0000000..7c4a013 --- /dev/null +++ b/workflow-steps/cache/test-files/packages/app1/mock_node_modules/one.txt @@ -0,0 +1 @@ +aaa \ No newline at end of file diff --git a/workflow-steps/cache/test-files/packages/app2/mock_node_modules/one.txt b/workflow-steps/cache/test-files/packages/app2/mock_node_modules/one.txt new file mode 100644 index 0000000..01f02e3 --- /dev/null +++ b/workflow-steps/cache/test-files/packages/app2/mock_node_modules/one.txt @@ -0,0 +1 @@ +bbb \ No newline at end of file diff --git a/workflow-steps/cache/test-files/packages/app3/mock_node_modules/one.txt b/workflow-steps/cache/test-files/packages/app3/mock_node_modules/one.txt new file mode 100644 index 0000000..2383bd5 --- /dev/null +++ b/workflow-steps/cache/test-files/packages/app3/mock_node_modules/one.txt @@ -0,0 +1 @@ +ccc \ No newline at end of file