From db6cb5db5cc086843c0e62b9a2efa680fbb54df6 Mon Sep 17 00:00:00 2001 From: Michael Levin Date: Tue, 5 Nov 2024 16:56:15 -0500 Subject: [PATCH 1/7] [Tech Debt] Replace done() with async/await --- test/actions/action.test.js | 12 ++-- .../google_analytics/query_authorizer.test.js | 16 ++--- test/process_results/result_formatter.test.js | 20 +++--- test/processor.test.js | 18 ++---- test/publish/disk.test.js | 17 +++-- test/publish/postgres.test.js | 62 ++++++++----------- test/publish/s3.test.js | 32 ++++------ 7 files changed, 73 insertions(+), 104 deletions(-) diff --git a/test/actions/action.test.js b/test/actions/action.test.js index fff9c4d5..58e931df 100644 --- a/test/actions/action.test.js +++ b/test/actions/action.test.js @@ -41,12 +41,10 @@ describe("Action", () => { } } - beforeEach((done) => { + beforeEach(async () => { promiseExecuted = false; subject = new SuccessfulTestAction(); - subject.execute(context).then(() => { - done(); - }); + await subject.execute(context); }); it("executes the strategy and awaits", () => { @@ -66,13 +64,11 @@ describe("Action", () => { } } - beforeEach((done) => { + beforeEach(async () => { promiseExecuted = false; errorlogSpy.resetHistory(); subject = new UnsuccessfulTestAction(); - subject.execute(context).catch(() => { - done(); - }); + await subject.execute(context).catch(() => {}); }); it("executes the strategy and awaits", () => { diff --git a/test/google_analytics/query_authorizer.test.js b/test/google_analytics/query_authorizer.test.js index 39db51cd..2c13e360 100644 --- a/test/google_analytics/query_authorizer.test.js +++ b/test/google_analytics/query_authorizer.test.js @@ -39,42 +39,42 @@ describe("GoogleAnalyticsQueryAuthorizer", () => { appConfig.key_file = undefined; }); - it("should resolve a query with the auth prop set to an authorized JWT", () => { - return subject.authorizeQuery(query, appConfig).then((query) => { + it("should resolve a query with the auth prop set to an authorized JWT", async () => { + await subject.authorizeQuery(query, appConfig).then((query) => { expect(query.abc).to.equal(123); expect(query.auth).to.not.be.undefined; expect(query.auth).to.be.an.instanceof(googleapis.Auth.JWT); }); }); - it("should create a JWT with the proper scopes", () => { - return subject.authorizeQuery({}, appConfig).then((query) => { + it("should create a JWT with the proper scopes", async () => { + await subject.authorizeQuery({}, appConfig).then((query) => { expect(query.auth.initArguments[3]).to.deep.equal([ "https://www.googleapis.com/auth/analytics.readonly", ]); }); }); - it("should authorize the JWT and resolve if it is valid", () => { + it("should authorize the JWT and resolve if it is valid", async () => { let jwtAuthorized = false; googleapis.Auth.JWT.prototype.authorize = (callback) => { jwtAuthorized = true; callback(null, {}); }; - return subject.authorizeQuery({}, appConfig).then(() => { + await subject.authorizeQuery({}, appConfig).then(() => { expect(jwtAuthorized).to.equal(true); }); }); - it("should authorize the JWT and reject if it is invalid", () => { + it("should authorize the JWT and reject if it is invalid", async () => { let jwtAuthorized = false; googleapis.Auth.JWT.prototype.authorize = (callback) => { jwtAuthorized = true; callback(new Error("Failed to authorize")); }; - return subject.authorizeQuery({}, appConfig).catch((err) => { + await subject.authorizeQuery({}, appConfig).catch((err) => { expect(jwtAuthorized).to.equal(true); expect(err.message).to.equal("Failed to authorize"); }); diff --git 
a/test/process_results/result_formatter.test.js b/test/process_results/result_formatter.test.js index 81730221..c77101f9 100644 --- a/test/process_results/result_formatter.test.js +++ b/test/process_results/result_formatter.test.js @@ -25,10 +25,10 @@ describe("ResultFormatter", () => { }); }); - it("should format results into JSON if the format is 'json'", () => { + it("should format results into JSON if the format is 'json'", async () => { const result = analyticsDataProcessor.processData({ report, data }); - return ResultFormatter.formatResult(result, { format: "json" }).then( + await ResultFormatter.formatResult(result, { format: "json" }).then( (formattedResult) => { const json = JSON.parse(formattedResult); json.data.forEach((row, index) => { @@ -46,10 +46,10 @@ describe("ResultFormatter", () => { ); }); - it("should remove the data attribute for JSON if options.slim is true", () => { + it("should remove the data attribute for JSON if options.slim is true", async () => { const result = analyticsDataProcessor.processData({ report, data }); - return ResultFormatter.formatResult(result, { + await ResultFormatter.formatResult(result, { format: "json", slim: true, }).then((formattedResult) => { @@ -58,19 +58,19 @@ describe("ResultFormatter", () => { }); }); - it("should reject if the data cannot be JSON stringified", () => { + it("should reject if the data cannot be JSON stringified", async () => { const array = []; array[0] = array; - return ResultFormatter.formatResult(array).catch((e) => { + await ResultFormatter.formatResult(array).catch((e) => { expect(e).to.match(/TypeError: Converting circular structure to JSON/); }); }); - it("should format results into CSV if the format is 'csv'", () => { + it("should format results into CSV if the format is 'csv'", async () => { const result = analyticsDataProcessor.processData({ report, data }); - return ResultFormatter.formatResult(result, { + await ResultFormatter.formatResult(result, { format: "csv", slim: true, }).then((formattedResult) => { @@ -89,10 +89,10 @@ describe("ResultFormatter", () => { }); }); - it("should throw an error if the format is unsupported", () => { + it("should throw an error if the format is unsupported", async () => { const result = analyticsDataProcessor.processData({ report, data }); - return ResultFormatter.formatResult(result, { + await ResultFormatter.formatResult(result, { format: "xml", slim: true, }).catch((e) => { diff --git a/test/processor.test.js b/test/processor.test.js index dbd3f88d..0ee2bebe 100644 --- a/test/processor.test.js +++ b/test/processor.test.js @@ -19,11 +19,9 @@ describe("Processor", () => { }; const context = {}; - beforeEach((done) => { + beforeEach(async () => { subject = new Processor([action]); - subject.processChain(context).then(() => { - done(); - }); + await subject.processChain(context); }); it("calls execute on the action", () => { @@ -40,11 +38,9 @@ describe("Processor", () => { }; const context = {}; - beforeEach((done) => { + beforeEach(async () => { subject = new Processor([action]); - subject.processChain(context).then(() => { - done(); - }); + await subject.processChain(context); }); it("does not call execute on the action", () => { @@ -67,11 +63,9 @@ describe("Processor", () => { }; const context = {}; - beforeEach((done) => { + beforeEach(async () => { subject = new Processor([action1, action2]); - subject.processChain(context).then(() => { - done(); - }); + await subject.processChain(context); }); it("calls execute on each action", () => { diff --git 
a/test/publish/disk.test.js b/test/publish/disk.test.js index d812b9fa..e705c648 100644 --- a/test/publish/disk.test.js +++ b/test/publish/disk.test.js @@ -18,7 +18,7 @@ describe("DiskPublisher", () => { describe(".publish({ name, data, format, directory })", () => { context("when the format is json", () => { - it("should write the results to /.json", (done) => { + it("should write the results to /.json", async () => { const options = { output: "path/to/output", formats: ["json"] }; const report = { name: "report-name" }; const results = "I'm the results"; @@ -31,22 +31,19 @@ describe("DiskPublisher", () => { return null; }; - DiskPublisher.publish({ + await DiskPublisher.publish({ name: report.name, data: results, format: options.formats[0], directory: options.output, - }) - .then(() => { - expect(fileWritten).to.be.true; - done(); - }) - .catch(done); + }).then(() => { + expect(fileWritten).to.be.true; + }); }); }); context("when the format is csv", () => { - it("should write the results to /.csv", () => { + it("should write the results to /.csv", async () => { const options = { output: "path/to/output", formats: ["csv"] }; const report = { name: "report-name" }; const results = "I'm the results"; @@ -59,7 +56,7 @@ describe("DiskPublisher", () => { return null; }; - return DiskPublisher.publish({ + await DiskPublisher.publish({ name: report.name, data: results, format: options.formats[0], diff --git a/test/publish/postgres.test.js b/test/publish/postgres.test.js index a0602901..2556c34c 100644 --- a/test/publish/postgres.test.js +++ b/test/publish/postgres.test.js @@ -9,26 +9,28 @@ const appConfig = new AppConfig(); describe("PostgresPublisher", () => { let databaseClient, results, subject; - before((done) => { + before(async () => { process.env.NODE_ENV = "test"; // Setup the database client - databaseClient = knex({ client: "pg", connection: database.connection }); - done(); + databaseClient = await knex({ + client: "pg", + connection: database.connection, + }); }); - after((done) => { + after(async () => { // Clean up the database client - databaseClient.destroy().then(() => done()); + await databaseClient.destroy(); }); - beforeEach((done) => { + beforeEach(async () => { results = Object.assign({}, resultsFixture); subject = new PostgresPublisher(appConfig); - database.resetSchema(databaseClient).then(() => done()); + await database.resetSchema(databaseClient); }); describe(".publish(results)", () => { - it("should insert a record for each results.data element", (done) => { + it("should insert a record for each results.data element", async () => { results.name = "report-name"; results.data = [ { @@ -41,7 +43,7 @@ describe("PostgresPublisher", () => { }, ]; - subject + await subject .publish(results) .then(() => { return databaseClient(PostgresPublisher.ANALYTICS_DATA_TABLE_NAME) @@ -56,12 +58,10 @@ describe("PostgresPublisher", () => { expect(row.data.name).to.equal(data.name); expect(row.date.toISOString()).to.match(RegExp(`^${data.date}`)); }); - done(); - }) - .catch(done); + }); }); - it("should coerce certain values into numbers", (done) => { + it("should coerce certain values into numbers", async () => { results.name = "report-name"; results.data = [ { @@ -72,7 +72,7 @@ describe("PostgresPublisher", () => { }, ]; - subject + await subject .publish(results) .then(() => { return databaseClient @@ -85,12 +85,10 @@ describe("PostgresPublisher", () => { expect(row.data.visits).to.equal(123); expect(row.data.total_events).to.be.a("number"); 
expect(row.data.total_events).to.equal(456); - done(); - }) - .catch(done); + }); }); - it("should ignore reports that don't have a date dimension", (done) => { + it("should ignore reports that don't have a date dimension", async () => { results.query = { dimensions: [{ name: "something" }, { name: "somethingElse" }], }; @@ -104,12 +102,10 @@ describe("PostgresPublisher", () => { }) .then((rows) => { expect(rows).to.have.length(0); - done(); - }) - .catch(done); + }); }); - it("should ignore data points that have already been inserted", (done) => { + it("should ignore data points that have already been inserted", async () => { const firstResults = Object.assign({}, results); const secondResults = Object.assign({}, results); @@ -168,7 +164,7 @@ describe("PostgresPublisher", () => { }, ]; - subject + await subject .publish(firstResults) .then(() => { return subject.publish(secondResults); @@ -180,12 +176,10 @@ describe("PostgresPublisher", () => { }) .then((rows) => { expect(rows).to.have.length(6); - done(); - }) - .catch(done); + }); }); - it("should overwrite existing data points if the number of visits or users has changed", (done) => { + it("should overwrite existing data points if the number of visits or users has changed", async () => { const firstResults = Object.assign({}, results); const secondResults = Object.assign({}, results); @@ -214,7 +208,7 @@ describe("PostgresPublisher", () => { }, ]; - subject + await subject .publish(firstResults) .then(() => { return subject.publish(secondResults); @@ -233,12 +227,10 @@ describe("PostgresPublisher", () => { expect(row.data.total_events).to.equal(400); } }); - done(); - }) - .catch(done); + }); }); - it("should not not insert a record if the date is invalid", (done) => { + it("should not not insert a record if the date is invalid", async () => { results.data = [ { date: "(other)", @@ -250,7 +242,7 @@ describe("PostgresPublisher", () => { }, ]; - subject + await subject .publish(results) .then(() => { return databaseClient @@ -260,9 +252,7 @@ describe("PostgresPublisher", () => { .then((rows) => { expect(rows).to.have.length(1); expect(rows[0].data.visits).to.equal(456); - done(); - }) - .catch(done); + }); }); }); }); diff --git a/test/publish/s3.test.js b/test/publish/s3.test.js index 74fb9bda..053effa3 100644 --- a/test/publish/s3.test.js +++ b/test/publish/s3.test.js @@ -54,7 +54,7 @@ describe("S3Service", () => { zlibMock.gzip = (data, cb) => cb(null, data); }); - it("should publish compressed JSON results to the S3 bucket", (done) => { + it("should publish compressed JSON results to the S3 bucket", async () => { appConfig.format = "json"; report.name = "test-report"; let gzipCalled = false; @@ -66,7 +66,7 @@ describe("S3Service", () => { subject = new S3Service({ ...appConfig, format: "json" }); - subject + await subject .publish( { name: report.name, @@ -89,12 +89,10 @@ describe("S3Service", () => { expect(putObjectCommand.config.CacheControl).to.equal("max-age=60"); expect(putObjectCommand.config.Body).to.equal("compressed data"); expect(gzipCalled).to.equal(true); - done(); - }) - .catch(done); + }); }); - it("should publish CSV results to the S3 bucket", (done) => { + it("should publish CSV results to the S3 bucket", async () => { appConfig.format = "csv"; appConfig.aws.cache = undefined; report.name = "test-report"; @@ -107,7 +105,7 @@ describe("S3Service", () => { subject = new S3Service({ ...appConfig, format: "csv" }); - subject + await subject .publish( { name: report.name, @@ -128,12 +126,10 @@ describe("S3Service", () => 
{ expect(putObjectCommand.config.CacheControl).to.equal("max-age=0"); expect(putObjectCommand.config.Body).to.equal("compressed data"); expect(gzipCalled).to.equal(true); - done(); - }) - .catch(done); + }); }); - it("should reject if there is an error uploading the data", (done) => { + it("should reject if there is an error uploading the data", async () => { appConfig.format = "json"; shouldErrorOnSend = true; let gzipCalled = false; @@ -145,7 +141,7 @@ describe("S3Service", () => { subject = new S3Service({ ...appConfig, format: "json" }); - subject + await subject .publish( { name: report.name, @@ -157,18 +153,16 @@ describe("S3Service", () => { ) .catch(() => { expect(gzipCalled).to.equal(true); - done(); - }) - .catch(done); + }); }); - it("should reject if there is an error compressing the data", (done) => { + it("should reject if there is an error compressing the data", async () => { appConfig.format = "json"; zlibMock.gzip = (data, cb) => cb(new Error("test zlib error")); subject = new S3Service({ ...appConfig, format: "json" }); - subject + await subject .publish( { name: report.name, @@ -180,8 +174,6 @@ describe("S3Service", () => { ) .catch((err) => { expect(err.message).to.equal("test zlib error"); - done(); - }) - .catch(done); + }); }); }); From 684737f49c893605bbb8ceee0cb406d8a166e839 Mon Sep 17 00:00:00 2001 From: Shelley Nason Date: Thu, 14 Nov 2024 15:45:16 -0600 Subject: [PATCH 2/7] Add 'npm audit signatures' to CI workflow. --- .github/workflows/ci.yml | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 071512a9..badb2845 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -19,8 +19,24 @@ jobs: run: npm ci - name: Lint javascript run: npm run lint + audit_dependencies: + runs-on: ubuntu-latest + steps: + - name: Code checkout + uses: actions/checkout@v4 + - name: Install node + uses: actions/setup-node@v4 + with: + node-version: "lts/*" + cache: 'npm' + - name: Install node dependencies + run: npm ci + - name: Validate npm package signatures + run: npm audit signatures test: - needs: lint + needs: + - lint + - audit_dependencies runs-on: ubuntu-latest # Start Postgres as a service, wait until healthy. Uses latest Postgres version. services: From 5faf2de56b556f27fe3cd6affd45e50aaa8689a7 Mon Sep 17 00:00:00 2001 From: Shelley Nason Date: Fri, 15 Nov 2024 15:42:53 -0600 Subject: [PATCH 3/7] PR fixes. 
---
 .github/workflows/ci.yml     | 9 ++++++---
 .github/workflows/deploy.yml | 4 ++--
 2 files changed, 8 insertions(+), 5 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index badb2845..e647089b 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -13,7 +13,7 @@ jobs:
       - name: Install node
         uses: actions/setup-node@v4
         with:
-          node-version: "lts/*"
+          node-version-file: ".nvmrc"
           cache: 'npm'
       - name: Install node dependencies
         run: npm ci
@@ -27,7 +27,7 @@
       - name: Install node
         uses: actions/setup-node@v4
         with:
-          node-version: "lts/*"
+          node-version-file: ".nvmrc"
           cache: 'npm'
       - name: Install node dependencies
         run: npm ci
@@ -59,7 +59,7 @@
       - name: Install node
         uses: actions/setup-node@v4
         with:
-          node-version: "lts/*"
+          node-version-file: ".nvmrc"
           cache: 'npm'
       - name: Install node dependencies
         run: npm ci
@@ -68,6 +68,7 @@
   deploy_dev:
     needs:
       - lint
+      - audit_dependencies
       - test
     if: github.ref == 'refs/heads/develop'
     uses: 18F/analytics-reporter/.github/workflows/deploy.yml@develop
@@ -97,6 +98,7 @@
   deploy_stg:
     needs:
       - lint
+      - audit_dependencies
       - test
     if: github.ref == 'refs/heads/staging'
     uses: 18F/analytics-reporter/.github/workflows/deploy.yml@develop
@@ -126,6 +128,7 @@
   deploy_prd:
     needs:
       - lint
+      - audit_dependencies
       - test
     if: github.ref == 'refs/heads/master'
     uses: 18F/analytics-reporter/.github/workflows/deploy.yml@develop
diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
index dbfe4fd5..d9d5d0b3 100644
--- a/.github/workflows/deploy.yml
+++ b/.github/workflows/deploy.yml
@@ -91,7 +91,7 @@ jobs:
       - name: Install node
         uses: actions/setup-node@v4
         with:
-          node-version: "lts/*"
+          node-version-file: ".nvmrc"
           cache: 'npm'
       - name: Install node dependencies
         # This causes npm install to omit dev dependencies per NPM docs.
@@ -133,7 +133,7 @@
       - name: Install node
         uses: actions/setup-node@v4
         with:
-          node-version: "lts/*"
+          node-version-file: ".nvmrc"
           cache: 'npm'
       - name: Install node dependencies
         # This causes npm install to omit dev dependencies per NPM docs.
From 9cd3d6b9243fbfe906c154fb7c16528c26424591 Mon Sep 17 00:00:00 2001
From: Shelley Nason
Date: Fri, 15 Nov 2024 16:06:00 -0600
Subject: [PATCH 4/7] [Tech Debt] Bump node version to 22

---
 .nvmrc            | 2 +-
 package-lock.json | 2 +-
 package.json      | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/.nvmrc b/.nvmrc
index a3597ecb..2bd5a0a9 100644
--- a/.nvmrc
+++ b/.nvmrc
@@ -1 +1 @@
-20.11
+22
diff --git a/package-lock.json b/package-lock.json
index 666c65d5..a6b0b47c 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -42,7 +42,7 @@
         "yup": "^1.4.0"
       },
       "engines": {
-        "node": "20.x.x"
+        "node": "22.x.x"
       },
       "optionalDependencies": {
         "knex": "^3.1.0",
diff --git a/package.json b/package.json
index b0698c28..9c69902e 100644
--- a/package.json
+++ b/package.json
@@ -52,7 +52,7 @@
     "*.md"
   ],
   "engines": {
-    "node": "20.x.x"
+    "node": "22.x.x"
  },
   "preferGlobal": true,
   "main": "index",

From 0461c244b6c648001c3a3fe7a7f354e8f201aac5 Mon Sep 17 00:00:00 2001
From: Michael Levin
Date: Wed, 20 Nov 2024 11:00:06 -0500
Subject: [PATCH 5/7] [Tech Debt] Add dotenv-cli to dev dependencies and scripts

---
 package-lock.json | 31 ++++++++++++++++++++++++++++++-
 package.json      |  5 ++++-
 2 files changed, 34 insertions(+), 2 deletions(-)

diff --git a/package-lock.json b/package-lock.json
index a6b0b47c..6ce0f55a 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -13,7 +13,6 @@
         "@google-analytics/data": "^4.7.0",
         "@smithy/node-http-handler": "^3.0.0",
         "@snyk/protect": "^1.1269.0",
-        "dotenv": "^16.3.1",
         "fast-csv": "^4.3.6",
         "googleapis": "^140.0.0",
         "max-listeners-exceeded-warning": "^0.0.1",
@@ -30,6 +29,8 @@
         "@cucumber/cucumber": "^10.3.1",
         "@eslint/js": "^8.57.0",
         "chai": "^4.4.0",
+        "dotenv": "^16.4.5",
+        "dotenv-cli": "^7.4.3",
         "eslint": "^8.56.0",
         "eslint-config-prettier": "^9.1.0",
         "eslint-plugin-jsdoc": "^48.7.0",
@@ -4000,6 +4001,8 @@
       "version": "16.4.5",
       "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.5.tgz",
       "integrity": "sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==",
+      "dev": true,
+      "license": "BSD-2-Clause",
       "engines": {
         "node": ">=12"
       },
@@ -4007,6 +4010,32 @@
         "url": "https://dotenvx.com"
       }
     },
+    "node_modules/dotenv-cli": {
+      "version": "7.4.3",
+      "resolved": "https://registry.npmjs.org/dotenv-cli/-/dotenv-cli-7.4.3.tgz",
+      "integrity": "sha512-lf1E+TL1xFeoOHy2hSO3kLkx3KX8CDi17ccn5z5dVCnk2PuWqUKAnBVgQmhfS0BPuzFbptTEHVcIKFsGF0NAcg==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "cross-spawn": "^7.0.3",
+        "dotenv": "^16.3.0",
+        "dotenv-expand": "^10.0.0",
+        "minimist": "^1.2.6"
+      },
+      "bin": {
+        "dotenv": "cli.js"
+      }
+    },
+    "node_modules/dotenv-expand": {
+      "version": "10.0.0",
+      "resolved": "https://registry.npmjs.org/dotenv-expand/-/dotenv-expand-10.0.0.tgz",
+      "integrity": "sha512-GopVGCpVS1UKH75VKHGuQFqS1Gusej0z4FyQkPdwjil2gNIv+LNsqBlboOzpJFZKVT95GkCyWJbBSdFEFUWI2A==",
+      "dev": true,
+      "license": "BSD-2-Clause",
+      "engines": {
+        "node": ">=12"
+      }
+    },
     "node_modules/duplexify": {
       "version": "4.1.3",
       "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.3.tgz",
diff --git a/package.json b/package.json
index 9c69902e..aad761ff 100644
--- a/package.json
+++ b/package.json
@@ -12,6 +12,8 @@
     "migrate": "knex migrate:latest",
     "pretest": "NODE_ENV=test npm run migrate",
     "start": "./bin/analytics",
+    "start:publisher": "dotenv -e .env.analytics node -- deploy/cron.js",
+    "start:consumer": "dotenv -e .env.analytics node -- deploy/consumer.js",
     "test": "NODE_ENV=test mocha",
     "test:debug": "NODE_ENV=test node --inspect-brk node_modules/mocha/bin/mocha",
     "coverage": "nyc --reporter html --reporter text -t coverage --report-dir coverage/summary npm run test",
@@ -71,7 +73,6 @@
     "@google-analytics/data": "^4.7.0",
     "@smithy/node-http-handler": "^3.0.0",
     "@snyk/protect": "^1.1269.0",
-    "dotenv": "^16.3.1",
     "fast-csv": "^4.3.6",
     "googleapis": "^140.0.0",
     "max-listeners-exceeded-warning": "^0.0.1",
@@ -85,6 +86,8 @@
     "@cucumber/cucumber": "^10.3.1",
     "@eslint/js": "^8.57.0",
     "chai": "^4.4.0",
+    "dotenv": "^16.4.5",
+    "dotenv-cli": "^7.4.3",
     "eslint": "^8.56.0",
     "eslint-config-prettier": "^9.1.0",
     "eslint-plugin-jsdoc": "^48.7.0",

From de85aec741e9705fd6d3552246e05805788df3b8 Mon Sep 17 00:00:00 2001
From: Michael Levin
Date: Wed, 20 Nov 2024 11:00:31 -0500
Subject: [PATCH 6/7] [Feature] Add screen page views to realtime report

---
 reports/usa.json | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/reports/usa.json b/reports/usa.json
index 6981db21..5895112c 100644
--- a/reports/usa.json
+++ b/reports/usa.json
@@ -8,12 +8,15 @@
         "metrics": [
           {
             "name": "activeUsers"
+          },
+          {
+            "name": "screenPageViews"
           }
         ]
       },
       "meta": {
-        "name": "Active Users Right Now",
-        "description": "Number of users currently visiting all sites."
+        "name": "Active users and page views realtime",
+        "description": "Number of users currently visiting all sites and the number of page views."
       }
     },
     {

From 1e72cf968d9408ad7738941bb00d1e78e32537c1 Mon Sep 17 00:00:00 2001
From: Michael Levin
Date: Wed, 20 Nov 2024 12:28:57 -0500
Subject: [PATCH 7/7] [Feature] Add realtime events report

---
 reports/usa.json | 21 +++++++++++++++++++++
 1 file changed, 21 insertions(+)

diff --git a/reports/usa.json b/reports/usa.json
index 5895112c..8f0a612e 100644
--- a/reports/usa.json
+++ b/reports/usa.json
@@ -19,6 +19,27 @@
         "description": "Number of users currently visiting all sites and the number of page views."
       }
     },
+    {
+      "name": "realtime-event-counts",
+      "frequency": "realtime",
+      "realtime": true,
+      "query": {
+        "dimensions": [
+          {
+            "name": "eventName"
+          }
+        ],
+        "metrics": [
+          {
+            "name": "eventCount"
+          }
+        ]
+      },
+      "meta": {
+        "name": "Number of events of each type realtime",
+        "description": "Number of current events for each event name across all sites."
+      }
+    },
     {
       "name": "total-sessions-30-days",
       "frequency": "daily",