diff --git a/app/managers/performance_metrics_manager/index.test.ts b/app/managers/performance_metrics_manager/index.test.ts index 165c650649a..14263578a92 100644 --- a/app/managers/performance_metrics_manager/index.test.ts +++ b/app/managers/performance_metrics_manager/index.test.ts @@ -5,16 +5,29 @@ import performance from 'react-native-performance'; import {mockApiClient} from '@test/mock_api_client'; import TestHelper from '@test/test_helper'; +import {logWarning} from '@utils/log'; import NetworkManager from '../network_manager'; +import {testExports as batcherTestExports} from './performance_metrics_batcher'; import {getBaseReportRequest} from './test_utils'; -import PerformanceMetricsManager from '.'; +import {testExports} from '.'; + +const PerformanceMetricsManagerClass = testExports.PerformanceMetricsManager; +const { + RETRY_TIME, + MAX_RETRIES, +} = testExports; + +const { + INTERVAL_TIME, +} = batcherTestExports; const TEST_EPOCH = 1577836800000; jest.mock('@utils/log', () => ({ - logDebug: () => '', + logDebug: jest.fn(), + logWarning: jest.fn(), })); performance.timeOrigin = TEST_EPOCH; @@ -22,11 +35,14 @@ performance.timeOrigin = TEST_EPOCH; describe('load metrics', () => { const serverUrl = 'http://www.someserverurl.com/'; const expectedUrl = `${serverUrl}/api/v4/client_perf`; - - const measure: PerformanceReportMeasure = { - metric: 'mobile_load', - timestamp: TEST_EPOCH + 61000, - value: 61000, + let PerformanceMetricsManager = new PerformanceMetricsManagerClass(); + + const getMeasure = (timestamp: number, value: number): PerformanceReportMeasure => { + return { + metric: 'mobile_load', + timestamp, + value, + }; }; beforeEach(async () => { @@ -47,30 +63,70 @@ describe('load metrics', () => { 'setImmediate', 'setInterval', ]}).setSystemTime(new Date(TEST_EPOCH)); + PerformanceMetricsManager = new PerformanceMetricsManagerClass(); }); afterEach(async () => { jest.useRealTimers(); NetworkManager.invalidateClient(serverUrl); await 
TestHelper.tearDown(); + performance.clearMarks(); }); it('only load on target', async () => { performance.mark('nativeLaunchStart'); + const measure = getMeasure(TEST_EPOCH + INTERVAL_TIME, INTERVAL_TIME); const expectedRequest = getBaseReportRequest(measure.timestamp, measure.timestamp + 1); expectedRequest.body.histograms = [measure]; PerformanceMetricsManager.setLoadTarget('CHANNEL'); PerformanceMetricsManager.finishLoad('HOME', serverUrl); await TestHelper.tick(); - jest.advanceTimersByTime(61000); + jest.advanceTimersByTime(INTERVAL_TIME); await TestHelper.tick(); expect(mockApiClient.post).not.toHaveBeenCalled(); PerformanceMetricsManager.finishLoad('CHANNEL', serverUrl); await TestHelper.tick(); - jest.advanceTimersByTime(61000); + jest.advanceTimersByTime(INTERVAL_TIME); + await TestHelper.tick(); + expect(mockApiClient.post).toHaveBeenCalledWith(expectedUrl, expectedRequest); + }); + + it('retry if the mark is not yet present', async () => { + const measure = getMeasure(TEST_EPOCH + (RETRY_TIME * 2), RETRY_TIME); + const expectedRequest = getBaseReportRequest(measure.timestamp, measure.timestamp + 1); + expectedRequest.body.histograms = [measure]; + + PerformanceMetricsManager.setLoadTarget('CHANNEL'); + PerformanceMetricsManager.finishLoad('CHANNEL', serverUrl); + await TestHelper.tick(); + jest.advanceTimersByTime(RETRY_TIME); + await TestHelper.tick(); + performance.mark('nativeLaunchStart'); + await TestHelper.tick(); + jest.advanceTimersByTime(RETRY_TIME); + await TestHelper.tick(); + await TestHelper.tick(); + jest.advanceTimersByTime(INTERVAL_TIME); await TestHelper.tick(); expect(mockApiClient.post).toHaveBeenCalledWith(expectedUrl, expectedRequest); }); + + it('fail graciously if no measure is set', async () => { + PerformanceMetricsManager.setLoadTarget('CHANNEL'); + PerformanceMetricsManager.finishLoad('CHANNEL', serverUrl); + for (let i = 0; i < MAX_RETRIES; i++) { + // eslint-disable-next-line no-await-in-loop + await TestHelper.tick(); + 
jest.advanceTimersByTime(RETRY_TIME); + // eslint-disable-next-line no-await-in-loop + await TestHelper.tick(); + } + await TestHelper.tick(); + jest.advanceTimersByTime(INTERVAL_TIME); + await TestHelper.tick(); + expect(mockApiClient.post).not.toHaveBeenCalled(); + expect(logWarning).toHaveBeenCalled(); + }); }); describe('other metrics', () => { @@ -92,6 +148,8 @@ describe('other metrics', () => { value: 150, }; + const PerformanceMetricsManager = new PerformanceMetricsManagerClass(); + beforeEach(async () => { NetworkManager.createClient(serverUrl1); NetworkManager.createClient(serverUrl2); @@ -119,12 +177,13 @@ describe('other metrics', () => { NetworkManager.invalidateClient(serverUrl1); NetworkManager.invalidateClient(serverUrl2); await TestHelper.tearDown(); + performance.clearMarks(); }); it('do not send metrics when we do not start them', async () => { PerformanceMetricsManager.endMetric('mobile_channel_switch', serverUrl1); - jest.advanceTimersByTime(61000); + jest.advanceTimersByTime(INTERVAL_TIME); await TestHelper.tick(); expect(mockApiClient.post).not.toHaveBeenCalled(); }); @@ -139,7 +198,7 @@ describe('other metrics', () => { PerformanceMetricsManager.endMetric('mobile_channel_switch', serverUrl1); await TestHelper.tick(); - jest.advanceTimersByTime(61000); + jest.advanceTimersByTime(INTERVAL_TIME); await TestHelper.tick(); expect(mockApiClient.post).toHaveBeenCalledWith(expectedUrl1, expectedRequest); }); @@ -158,7 +217,7 @@ describe('other metrics', () => { PerformanceMetricsManager.endMetric('mobile_channel_switch', serverUrl1); await TestHelper.tick(); - jest.advanceTimersByTime(61000); + jest.advanceTimersByTime(INTERVAL_TIME); await TestHelper.tick(); expect(mockApiClient.post).toHaveBeenCalledWith(expectedUrl1, expectedRequest); }); @@ -178,7 +237,7 @@ describe('other metrics', () => { PerformanceMetricsManager.endMetric('mobile_team_switch', serverUrl1); await TestHelper.tick(); - jest.advanceTimersByTime(61000); + 
jest.advanceTimersByTime(INTERVAL_TIME); await TestHelper.tick(); expect(mockApiClient.post).toHaveBeenCalledWith(expectedUrl1, expectedRequest); }); @@ -201,7 +260,7 @@ describe('other metrics', () => { PerformanceMetricsManager.endMetric('mobile_team_switch', serverUrl2); await TestHelper.tick(); - jest.advanceTimersByTime(61000); + jest.advanceTimersByTime(INTERVAL_TIME); await TestHelper.tick(); expect(mockApiClient.post).toHaveBeenCalledWith(expectedUrl1, expectedRequest1); expect(mockApiClient.post).toHaveBeenCalledWith(expectedUrl2, expectedRequest2); diff --git a/app/managers/performance_metrics_manager/index.ts b/app/managers/performance_metrics_manager/index.ts index db97b4ee4cd..a3369f6a2e6 100644 --- a/app/managers/performance_metrics_manager/index.ts +++ b/app/managers/performance_metrics_manager/index.ts @@ -4,12 +4,17 @@ import {AppState, type AppStateStatus} from 'react-native'; import performance from 'react-native-performance'; +import {logWarning} from '@utils/log'; + import Batcher from './performance_metrics_batcher'; type Target = 'HOME' | 'CHANNEL' | 'THREAD' | undefined; type MetricName = 'mobile_channel_switch' | 'mobile_team_switch'; +const RETRY_TIME = 100; +const MAX_RETRIES = 3; + class PerformanceMetricsManager { private target: Target; private batchers: {[serverUrl: string]: Batcher} = {}; @@ -44,17 +49,32 @@ class PerformanceMetricsManager { } public finishLoad(location: Target, serverUrl: string) { + this.finishLoadWithRetries(location, serverUrl, 0); + } + + private finishLoadWithRetries(location: Target, serverUrl: string, retries: number) { if (this.target !== location || this.hasRegisteredLoad) { return; } - const measure = performance.measure('mobile_load', 'nativeLaunchStart'); - this.ensureBatcher(serverUrl).addToBatch({ - metric: 'mobile_load', - value: measure.duration, - timestamp: Date.now(), - }); - performance.clearMeasures('mobile_load'); + try { + const measure = performance.measure('mobile_load', 
'nativeLaunchStart'); + this.ensureBatcher(serverUrl).addToBatch({ + metric: 'mobile_load', + value: measure.duration, + timestamp: Date.now(), + }); + performance.clearMeasures('mobile_load'); + } catch { + // There seems to be a race condition where in some scenarios, the mobile load + // mark does not exist. We add this to avoid crashes related to this. + if (retries < MAX_RETRIES) { + setTimeout(() => this.finishLoadWithRetries(location, serverUrl, retries + 1), RETRY_TIME); + return; + } + logWarning('We could not retrieve the mobile load metric'); + } + this.hasRegisteredLoad = true; } @@ -82,4 +102,10 @@ class PerformanceMetricsManager { } } +export const testExports = { + PerformanceMetricsManager, + RETRY_TIME, + MAX_RETRIES, +}; + export default new PerformanceMetricsManager(); diff --git a/app/managers/performance_metrics_manager/performance_metrics_batcher.test.ts b/app/managers/performance_metrics_manager/performance_metrics_batcher.test.ts index 4092909cdcb..a96a120cfff 100644 --- a/app/managers/performance_metrics_manager/performance_metrics_batcher.test.ts +++ b/app/managers/performance_metrics_manager/performance_metrics_batcher.test.ts @@ -5,7 +5,7 @@ import NetworkManager from '@managers/network_manager'; import {mockApiClient} from '@test/mock_api_client'; import TestHelper from '@test/test_helper'; -import Batcher from './performance_metrics_batcher'; +import Batcher, {testExports} from './performance_metrics_batcher'; import {getBaseReportRequest} from './test_utils'; import type ServerDataOperator from '@database/operator/server_data_operator'; @@ -14,6 +14,11 @@ jest.mock('@utils/log', () => ({ logDebug: () => '', })); +const { + INTERVAL_TIME, + MAX_BATCH_SIZE, +} = testExports; + describe('perfromance metrics batcher', () => { const serverUrl = 'http://www.someserverurl.com'; const expectedUrl = `${serverUrl}/api/v4/client_perf`; @@ -82,7 +87,7 @@ describe('perfromance metrics batcher', () => { await TestHelper.tick(); 
expect(mockApiClient.post).not.toHaveBeenCalled(); - jest.advanceTimersByTime(61000); + jest.advanceTimersByTime(INTERVAL_TIME); await TestHelper.tick(); expect(mockApiClient.post).toHaveBeenCalledWith(expectedUrl, expectedRequest); }); @@ -94,7 +99,7 @@ describe('perfromance metrics batcher', () => { expectedRequest.body.histograms = [measure1]; batcher.addToBatch(measure1); - jest.advanceTimersByTime(61000); + jest.advanceTimersByTime(INTERVAL_TIME); await TestHelper.tick(); expect(mockApiClient.post).toHaveBeenCalledWith(expectedUrl, expectedRequest); }); @@ -102,7 +107,7 @@ describe('perfromance metrics batcher', () => { it('send the batch directly after maximum batch size is reached', async () => { const batcher = new Batcher(serverUrl); const expectedRequest = getBaseReportRequest(measure1.timestamp, measure2.timestamp); - for (let i = 0; i < 99; i++) { + for (let i = 0; i < MAX_BATCH_SIZE - 1; i++) { batcher.addToBatch(measure1); expectedRequest.body.histograms.push(measure1); } @@ -114,7 +119,7 @@ describe('perfromance metrics batcher', () => { await TestHelper.tick(); expect(mockApiClient.post).toHaveBeenCalledWith(expectedUrl, expectedRequest); - jest.advanceTimersByTime(61000); + jest.advanceTimersByTime(INTERVAL_TIME); await TestHelper.tick(); expect(mockApiClient.post).toHaveBeenCalledTimes(1); }); @@ -123,7 +128,7 @@ describe('perfromance metrics batcher', () => { await setMetricsConfig('false'); const batcher = new Batcher(serverUrl); batcher.addToBatch(measure2); - jest.advanceTimersByTime(61000); + jest.advanceTimersByTime(INTERVAL_TIME); await TestHelper.tick(); expect(mockApiClient.post).not.toHaveBeenCalled(); }); @@ -135,7 +140,7 @@ describe('perfromance metrics batcher', () => { batcher.forceSend(); await TestHelper.tick(); expect(mockApiClient.post).not.toHaveBeenCalled(); - jest.advanceTimersByTime(61000); + jest.advanceTimersByTime(INTERVAL_TIME); await TestHelper.tick(); expect(mockApiClient.post).not.toHaveBeenCalled(); }); @@ -146,13 
+151,13 @@ describe('perfromance metrics batcher', () => { expectedRequest.body.histograms = [measure1]; batcher.addToBatch(measure1); - jest.advanceTimersByTime(61000); + jest.advanceTimersByTime(INTERVAL_TIME); await TestHelper.tick(); expect(mockApiClient.post).toHaveBeenLastCalledWith(expectedUrl, expectedRequest); expectedRequest = getBaseReportRequest(measure2.timestamp, measure2.timestamp + 1); expectedRequest.body.histograms = []; - for (let i = 0; i < 100; i++) { + for (let i = 0; i < MAX_BATCH_SIZE; i++) { batcher.addToBatch(measure2); expectedRequest.body.histograms.push(measure2); } @@ -163,7 +168,7 @@ describe('perfromance metrics batcher', () => { expectedRequest.body.histograms = [measure3]; batcher.addToBatch(measure3); - jest.advanceTimersByTime(61000); + jest.advanceTimersByTime(INTERVAL_TIME); await TestHelper.tick(); expect(mockApiClient.post).toHaveBeenLastCalledWith(expectedUrl, expectedRequest); }); @@ -182,13 +187,13 @@ describe('perfromance metrics batcher', () => { await TestHelper.tick(); expect(mockApiClient.post).not.toHaveBeenCalled(); - jest.advanceTimersByTime(61000); + jest.advanceTimersByTime(INTERVAL_TIME); await TestHelper.tick(); expect(mockApiClient.post).toHaveBeenCalledWith(expectedUrl, expectedRequest); mockApiClient.post.mockClear(); - jest.advanceTimersByTime(61000); + jest.advanceTimersByTime(INTERVAL_TIME); await TestHelper.tick(); expect(mockApiClient.post).not.toHaveBeenCalled(); }); diff --git a/app/managers/performance_metrics_manager/performance_metrics_batcher.ts b/app/managers/performance_metrics_manager/performance_metrics_batcher.ts index aad2210c2cc..7129fbaab6c 100644 --- a/app/managers/performance_metrics_manager/performance_metrics_batcher.ts +++ b/app/managers/performance_metrics_manager/performance_metrics_batcher.ts @@ -100,4 +100,8 @@ class Batcher { } } +export const testExports = { + MAX_BATCH_SIZE, + INTERVAL_TIME, +}; export default Batcher;