Graciously handle load measure when there is no mark (#8052) (#8064)
* Graciously handle load measure when there is no mark

* Make public interface cleaner

(cherry picked from commit 095b974)

Co-authored-by: Daniel Espino García <[email protected]>
mattermost-build and larkox authored Jul 8, 2024
1 parent ae151e5 commit 0dbab82
Showing 4 changed files with 127 additions and 33 deletions.
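
In short, the fix wraps the performance.measure('mobile_load', 'nativeLaunchStart') call in a try/catch: when the 'nativeLaunchStart' mark is not present yet, the manager retries on a timeout up to a small cap and, if it still cannot measure, logs a warning instead of crashing. A minimal sketch of that pattern, simplified from the diff below (RETRY_TIME, MAX_RETRIES, and the warning text follow the commit; the report callback is illustrative):

import performance from 'react-native-performance';
import {logWarning} from '@utils/log';

const RETRY_TIME = 100; // ms between attempts, as in the commit
const MAX_RETRIES = 3;

// Illustrative helper: report the load duration once the mark exists, or warn and give up.
function measureLoadWithRetries(report: (durationMs: number) => void, retries = 0) {
    try {
        // performance.measure throws if the 'nativeLaunchStart' mark has not been set yet.
        const measure = performance.measure('mobile_load', 'nativeLaunchStart');
        report(measure.duration);
        performance.clearMeasures('mobile_load');
    } catch {
        if (retries < MAX_RETRIES) {
            setTimeout(() => measureLoadWithRetries(report, retries + 1), RETRY_TIME);
            return;
        }
        logWarning('We could not retrieve the mobile load metric');
    }
}

Bounding the retries keeps a permanently missing mark from rescheduling forever; after MAX_RETRIES the manager records nothing and only warns, which is what the 'fail graciously if no measure is set' test below asserts.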
87 changes: 73 additions & 14 deletions app/managers/performance_metrics_manager/index.test.ts
@@ -5,28 +5,44 @@ import performance from 'react-native-performance';

import {mockApiClient} from '@test/mock_api_client';
import TestHelper from '@test/test_helper';
import {logWarning} from '@utils/log';

import NetworkManager from '../network_manager';

import {testExports as batcherTestExports} from './performance_metrics_batcher';
import {getBaseReportRequest} from './test_utils';

import PerformanceMetricsManager from '.';
import {testExports} from '.';

const PerformanceMetricsManagerClass = testExports.PerformanceMetricsManager;
const {
RETRY_TIME,
MAX_RETRIES,
} = testExports;

const {
INTERVAL_TIME,
} = batcherTestExports;

const TEST_EPOCH = 1577836800000;
jest.mock('@utils/log', () => ({
logDebug: () => '',
logDebug: jest.fn(),
logWarning: jest.fn(),
}));

performance.timeOrigin = TEST_EPOCH;

describe('load metrics', () => {
const serverUrl = 'http://www.someserverurl.com/';
const expectedUrl = `${serverUrl}/api/v4/client_perf`;

const measure: PerformanceReportMeasure = {
metric: 'mobile_load',
timestamp: TEST_EPOCH + 61000,
value: 61000,
let PerformanceMetricsManager = new PerformanceMetricsManagerClass();

const getMeasure = (timestamp: number, value: number): PerformanceReportMeasure => {
return {
metric: 'mobile_load',
timestamp,
value,
};
};

beforeEach(async () => {
@@ -47,30 +63,70 @@ describe('load metrics', () => {
'setImmediate',
'setInterval',
]}).setSystemTime(new Date(TEST_EPOCH));
PerformanceMetricsManager = new PerformanceMetricsManagerClass();
});
afterEach(async () => {
jest.useRealTimers();
NetworkManager.invalidateClient(serverUrl);
await TestHelper.tearDown();
performance.clearMarks();
});

it('only load on target', async () => {
performance.mark('nativeLaunchStart');
const measure = getMeasure(TEST_EPOCH + INTERVAL_TIME, INTERVAL_TIME);
const expectedRequest = getBaseReportRequest(measure.timestamp, measure.timestamp + 1);
expectedRequest.body.histograms = [measure];

PerformanceMetricsManager.setLoadTarget('CHANNEL');
PerformanceMetricsManager.finishLoad('HOME', serverUrl);
await TestHelper.tick();
jest.advanceTimersByTime(61000);
jest.advanceTimersByTime(INTERVAL_TIME);
await TestHelper.tick();
expect(mockApiClient.post).not.toHaveBeenCalled();
PerformanceMetricsManager.finishLoad('CHANNEL', serverUrl);
await TestHelper.tick();
jest.advanceTimersByTime(61000);
jest.advanceTimersByTime(INTERVAL_TIME);
await TestHelper.tick();
expect(mockApiClient.post).toHaveBeenCalledWith(expectedUrl, expectedRequest);
});

it('retry if the mark is not yet present', async () => {
const measure = getMeasure(TEST_EPOCH + (RETRY_TIME * 2), RETRY_TIME);
const expectedRequest = getBaseReportRequest(measure.timestamp, measure.timestamp + 1);
expectedRequest.body.histograms = [measure];

PerformanceMetricsManager.setLoadTarget('CHANNEL');
PerformanceMetricsManager.finishLoad('CHANNEL', serverUrl);
await TestHelper.tick();
jest.advanceTimersByTime(RETRY_TIME);
await TestHelper.tick();
performance.mark('nativeLaunchStart');
await TestHelper.tick();
jest.advanceTimersByTime(RETRY_TIME);
await TestHelper.tick();
await TestHelper.tick();
jest.advanceTimersByTime(INTERVAL_TIME);
await TestHelper.tick();
expect(mockApiClient.post).toHaveBeenCalledWith(expectedUrl, expectedRequest);
});

it('fail graciously if no measure is set', async () => {
PerformanceMetricsManager.setLoadTarget('CHANNEL');
PerformanceMetricsManager.finishLoad('CHANNEL', serverUrl);
for (let i = 0; i < MAX_RETRIES; i++) {
// eslint-disable-next-line no-await-in-loop
await TestHelper.tick();
jest.advanceTimersByTime(MAX_RETRIES);
// eslint-disable-next-line no-await-in-loop
await TestHelper.tick();
}
await TestHelper.tick();
jest.advanceTimersByTime(INTERVAL_TIME);
await TestHelper.tick();
expect(mockApiClient.post).not.toHaveBeenCalled();
expect(logWarning).toHaveBeenCalled();
});
});

describe('other metrics', () => {
@@ -92,6 +148,8 @@ describe('other metrics', () => {
value: 150,
};

const PerformanceMetricsManager = new PerformanceMetricsManagerClass();

beforeEach(async () => {
NetworkManager.createClient(serverUrl1);
NetworkManager.createClient(serverUrl2);
@@ -119,12 +177,13 @@ describe('other metrics', () => {
NetworkManager.invalidateClient(serverUrl1);
NetworkManager.invalidateClient(serverUrl2);
await TestHelper.tearDown();
performance.clearMarks();
});

it('do not send metrics when we do not start them', async () => {
PerformanceMetricsManager.endMetric('mobile_channel_switch', serverUrl1);

jest.advanceTimersByTime(61000);
jest.advanceTimersByTime(INTERVAL_TIME);
await TestHelper.tick();
expect(mockApiClient.post).not.toHaveBeenCalled();
});
@@ -139,7 +198,7 @@
PerformanceMetricsManager.endMetric('mobile_channel_switch', serverUrl1);
await TestHelper.tick();

jest.advanceTimersByTime(61000);
jest.advanceTimersByTime(INTERVAL_TIME);
await TestHelper.tick();
expect(mockApiClient.post).toHaveBeenCalledWith(expectedUrl1, expectedRequest);
});
@@ -158,7 +217,7 @@
PerformanceMetricsManager.endMetric('mobile_channel_switch', serverUrl1);
await TestHelper.tick();

jest.advanceTimersByTime(61000);
jest.advanceTimersByTime(INTERVAL_TIME);
await TestHelper.tick();
expect(mockApiClient.post).toHaveBeenCalledWith(expectedUrl1, expectedRequest);
});
@@ -178,7 +237,7 @@
PerformanceMetricsManager.endMetric('mobile_team_switch', serverUrl1);
await TestHelper.tick();

jest.advanceTimersByTime(61000);
jest.advanceTimersByTime(INTERVAL_TIME);
await TestHelper.tick();
expect(mockApiClient.post).toHaveBeenCalledWith(expectedUrl1, expectedRequest);
});
@@ -201,7 +260,7 @@
PerformanceMetricsManager.endMetric('mobile_team_switch', serverUrl2);
await TestHelper.tick();

jest.advanceTimersByTime(61000);
jest.advanceTimersByTime(INTERVAL_TIME);
await TestHelper.tick();
expect(mockApiClient.post).toHaveBeenCalledWith(expectedUrl1, expectedRequest1);
expect(mockApiClient.post).toHaveBeenCalledWith(expectedUrl2, expectedRequest2);
40 changes: 33 additions & 7 deletions app/managers/performance_metrics_manager/index.ts
@@ -4,12 +4,17 @@
import {AppState, type AppStateStatus} from 'react-native';
import performance from 'react-native-performance';

import {logWarning} from '@utils/log';

import Batcher from './performance_metrics_batcher';

type Target = 'HOME' | 'CHANNEL' | 'THREAD' | undefined;
type MetricName = 'mobile_channel_switch' |
'mobile_team_switch';

const RETRY_TIME = 100;
const MAX_RETRIES = 3;

class PerformanceMetricsManager {
private target: Target;
private batchers: {[serverUrl: string]: Batcher} = {};
@@ -44,17 +49,32 @@ class PerformanceMetricsManager {
}

public finishLoad(location: Target, serverUrl: string) {
this.finishLoadWithRetries(location, serverUrl, 0);
}

private finishLoadWithRetries(location: Target, serverUrl: string, retries: number) {
if (this.target !== location || this.hasRegisteredLoad) {
return;
}

const measure = performance.measure('mobile_load', 'nativeLaunchStart');
this.ensureBatcher(serverUrl).addToBatch({
metric: 'mobile_load',
value: measure.duration,
timestamp: Date.now(),
});
performance.clearMeasures('mobile_load');
try {
const measure = performance.measure('mobile_load', 'nativeLaunchStart');
this.ensureBatcher(serverUrl).addToBatch({
metric: 'mobile_load',
value: measure.duration,
timestamp: Date.now(),
});
performance.clearMeasures('mobile_load');
} catch {
// There seems to be a race condition where, in some scenarios, the mobile load
// mark does not exist yet, so we retry and warn instead of crashing.
if (retries < MAX_RETRIES) {
setTimeout(() => this.finishLoadWithRetries(location, serverUrl, retries + 1), RETRY_TIME);
return;
}
logWarning('We could not retrieve the mobile load metric');
}

this.hasRegisteredLoad = true;
}

@@ -82,4 +102,10 @@
}
}

export const testExports = {
PerformanceMetricsManager,
RETRY_TIME,
MAX_RETRIES,
};

export default new PerformanceMetricsManager();
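
The testExports block above is the "make public interface cleaner" part of the commit: app code keeps importing the default singleton, while tests can construct their own instances and reuse the real timing constants instead of hard-coding values such as 61000 or 100. Reduced to its shape (illustrative module, not the literal file):

// some_manager.ts — app code keeps importing the default singleton.
class SomeManager {
    public doWork() {
        return 'done';
    }
}

const RETRY_TIME = 100;
const MAX_RETRIES = 3;

// Tests import this instead, so each spec can build an isolated instance
// and share constants with the module rather than duplicating magic numbers.
export const testExports = {SomeManager, RETRY_TIME, MAX_RETRIES};

export default new SomeManager();

That is also why the test file above re-creates the manager in beforeEach (PerformanceMetricsManager = new PerformanceMetricsManagerClass();): state such as hasRegisteredLoad no longer leaks between specs through the shared singleton.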
app/managers/performance_metrics_manager/performance_metrics_batcher.test.ts
@@ -5,7 +5,7 @@ import NetworkManager from '@managers/network_manager';
import {mockApiClient} from '@test/mock_api_client';
import TestHelper from '@test/test_helper';

import Batcher from './performance_metrics_batcher';
import Batcher, {testExports} from './performance_metrics_batcher';
import {getBaseReportRequest} from './test_utils';

import type ServerDataOperator from '@database/operator/server_data_operator';
@@ -14,6 +14,11 @@ jest.mock('@utils/log', () => ({
logDebug: () => '',
}));

const {
INTERVAL_TIME,
MAX_BATCH_SIZE,
} = testExports;

describe('performance metrics batcher', () => {
const serverUrl = 'http://www.someserverurl.com';
const expectedUrl = `${serverUrl}/api/v4/client_perf`;
@@ -82,7 +87,7 @@
await TestHelper.tick();
expect(mockApiClient.post).not.toHaveBeenCalled();

jest.advanceTimersByTime(61000);
jest.advanceTimersByTime(INTERVAL_TIME);
await TestHelper.tick();
expect(mockApiClient.post).toHaveBeenCalledWith(expectedUrl, expectedRequest);
});
@@ -94,15 +99,15 @@
expectedRequest.body.histograms = [measure1];

batcher.addToBatch(measure1);
jest.advanceTimersByTime(61000);
jest.advanceTimersByTime(INTERVAL_TIME);
await TestHelper.tick();
expect(mockApiClient.post).toHaveBeenCalledWith(expectedUrl, expectedRequest);
});

it('send the batch directly after maximum batch size is reached', async () => {
const batcher = new Batcher(serverUrl);
const expectedRequest = getBaseReportRequest(measure1.timestamp, measure2.timestamp);
for (let i = 0; i < 99; i++) {
for (let i = 0; i < MAX_BATCH_SIZE - 1; i++) {
batcher.addToBatch(measure1);
expectedRequest.body.histograms.push(measure1);
}
@@ -114,7 +119,7 @@
await TestHelper.tick();
expect(mockApiClient.post).toHaveBeenCalledWith(expectedUrl, expectedRequest);

jest.advanceTimersByTime(61000);
jest.advanceTimersByTime(INTERVAL_TIME);
await TestHelper.tick();
expect(mockApiClient.post).toHaveBeenCalledTimes(1);
});
@@ -123,7 +128,7 @@
await setMetricsConfig('false');
const batcher = new Batcher(serverUrl);
batcher.addToBatch(measure2);
jest.advanceTimersByTime(61000);
jest.advanceTimersByTime(INTERVAL_TIME);
await TestHelper.tick();
expect(mockApiClient.post).not.toHaveBeenCalled();
});
@@ -135,7 +140,7 @@
batcher.forceSend();
await TestHelper.tick();
expect(mockApiClient.post).not.toHaveBeenCalled();
jest.advanceTimersByTime(61000);
jest.advanceTimersByTime(INTERVAL_TIME);
await TestHelper.tick();
expect(mockApiClient.post).not.toHaveBeenCalled();
});
@@ -146,13 +151,13 @@
expectedRequest.body.histograms = [measure1];

batcher.addToBatch(measure1);
jest.advanceTimersByTime(61000);
jest.advanceTimersByTime(INTERVAL_TIME);
await TestHelper.tick();
expect(mockApiClient.post).toHaveBeenLastCalledWith(expectedUrl, expectedRequest);

expectedRequest = getBaseReportRequest(measure2.timestamp, measure2.timestamp + 1);
expectedRequest.body.histograms = [];
for (let i = 0; i < 100; i++) {
for (let i = 0; i < MAX_BATCH_SIZE; i++) {
batcher.addToBatch(measure2);
expectedRequest.body.histograms.push(measure2);
}
@@ -163,7 +168,7 @@
expectedRequest.body.histograms = [measure3];

batcher.addToBatch(measure3);
jest.advanceTimersByTime(61000);
jest.advanceTimersByTime(INTERVAL_TIME);
await TestHelper.tick();
expect(mockApiClient.post).toHaveBeenLastCalledWith(expectedUrl, expectedRequest);
});
@@ -182,13 +187,13 @@
await TestHelper.tick();
expect(mockApiClient.post).not.toHaveBeenCalled();

jest.advanceTimersByTime(61000);
jest.advanceTimersByTime(INTERVAL_TIME);
await TestHelper.tick();
expect(mockApiClient.post).toHaveBeenCalledWith(expectedUrl, expectedRequest);

mockApiClient.post.mockClear();

jest.advanceTimersByTime(61000);
jest.advanceTimersByTime(INTERVAL_TIME);
await TestHelper.tick();
expect(mockApiClient.post).not.toHaveBeenCalled();
});
app/managers/performance_metrics_manager/performance_metrics_batcher.ts
@@ -100,4 +100,8 @@
}
}

export const testExports = {
MAX_BATCH_SIZE,
INTERVAL_TIME,
};
export default Batcher;
