Commit c725a03

Add 'period_sampling_seconds' param instead of hard-coded 600 seconds
leplatrem committed Sep 10, 2024
1 parent da99b1c, commit c725a03
Showing 3 changed files with 17 additions and 10 deletions.
checks/normandy/uptake_error_rate.py (13 changes: 7 additions & 6 deletions)
@@ -1,6 +1,6 @@
"""
The percentage of reported errors in Uptake Telemetry should be under the specified
maximum. Error rate is computed for each period of 10min.
maximum. Error rate is computed for each period (of 10min by default).
For each recipe whose error rate is above the maximum, the total number of events
for each status is returned. The min/max timestamps give the datetime range of the
@@ -46,7 +46,7 @@
       client_timestamp,
       event_string_value AS status,
       `moz-fx-data-shared-prod`.udf.get_key(event_map_values, "source") AS source,
-      epoch - MOD(epoch, 600) AS period
+      epoch - MOD(epoch, {period_sampling_seconds}) AS period
     FROM
       event_uptake_telemetry
     WHERE event_category = 'uptake.remotecontent.result'
@@ -57,7 +57,7 @@
     SELECT
       -- Min/Max timestamps of this period
       PARSE_TIMESTAMP('%s', CAST(period AS STRING)) AS min_timestamp,
-      PARSE_TIMESTAMP('%s', CAST(period + 600 AS STRING)) AS max_timestamp,
+      PARSE_TIMESTAMP('%s', CAST(period + {period_sampling_seconds} AS STRING)) AS max_timestamp,
       normalized_channel AS channel,
       source,
       status,
@@ -87,14 +87,14 @@
 NORMANDY_STATUSES = {(k.split("_")[0], v): k for k, v in UPTAKE_STATUSES.items()}
 
 
-async def fetch_normandy_uptake(channels: List[str], period_hours: int):
+async def fetch_normandy_uptake(channels: List[str], period_hours: int, period_sampling_seconds: int):
     # Filter by channel if parameter is specified.
     channel_condition = (
         f"AND LOWER(normalized_channel) IN ({csv_quoted(channels)})" if channels else ""
     )
     return await fetch_bigquery(
         EVENTS_TELEMETRY_QUERY.format(
-            period_hours=period_hours, channel_condition=channel_condition
+            period_hours=period_hours, channel_condition=channel_condition, period_sampling_seconds=period_sampling_seconds
         )
     )

@@ -111,6 +111,7 @@ async def run(
     sources: List[str] = [],
     channels: List[str] = [],
     period_hours: int = 6,
+    period_sampling_seconds: int = 600,
 ) -> CheckResult:
     if not isinstance(max_error_percentage, dict):
         max_error_percentage = {"default": max_error_percentage}
@@ -137,7 +138,7 @@ async def run(
     }
     enabled_recipe_ids = enabled_recipes_by_ids.keys()
 
-    rows = await fetch_normandy_uptake(channels=channels, period_hours=period_hours)
+    rows = await fetch_normandy_uptake(channels=channels, period_hours=period_hours, period_sampling_seconds=period_sampling_seconds)
 
     min_timestamp = min(r["min_timestamp"] for r in rows)
     max_timestamp = max(r["max_timestamp"] for r in rows)
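
For reference, the bucketing that this commit parametrizes is plain integer arithmetic: a Unix timestamp is floored to the nearest multiple of period_sampling_seconds, and the period's min/max timestamps are the bucket start and the bucket start plus one period. Below is a minimal Python sketch of what the MOD / PARSE_TIMESTAMP expressions above compute (an illustration, not code from this repository; the epoch value is an arbitrary example):

from datetime import datetime, timezone

def period_bounds(epoch_seconds: int, period_sampling_seconds: int = 600):
    """Floor a Unix timestamp to its sampling period, as in
    `epoch - MOD(epoch, {period_sampling_seconds})`, and return the
    period's min/max datetimes."""
    period = epoch_seconds - epoch_seconds % period_sampling_seconds
    min_ts = datetime.fromtimestamp(period, tz=timezone.utc)
    max_ts = datetime.fromtimestamp(period + period_sampling_seconds, tz=timezone.utc)
    return min_ts, max_ts

# 2024-09-10 12:34:56 UTC falls into the 12:30:00-12:40:00 bucket with the 600s default.
print(period_bounds(1725971696))
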
checks/remotesettings/uptake_error_rate.py (10 changes: 7 additions & 3 deletions)
@@ -1,6 +1,6 @@
"""
The percentage of reported errors in Uptake Telemetry should be under the specified
maximum. Error rate is computed for each period of 10min.
maximum. Error rate is computed for each period (of 10min by default).
For each source whose error rate is above the maximum, the total number of events
for each status is returned. The min/max timestamps give the datetime range of the
@@ -36,7 +36,7 @@
       normalized_channel,
       SPLIT(app_version, '.')[OFFSET(0)] AS version,
       `moz-fx-data-shared-prod`.udf.get_key(event_map_values, "source") AS source,
-      UNIX_SECONDS(timestamp) - MOD(UNIX_SECONDS(timestamp), 600) AS period,
+      UNIX_SECONDS(timestamp) - MOD(UNIX_SECONDS(timestamp), {period_sampling_seconds}) AS period,
       event_string_value AS status
     FROM
       `moz-fx-data-shared-prod.telemetry_derived.events_live`
@@ -51,7 +51,7 @@
     SELECT
       -- Min/Max timestamps of this period
       PARSE_TIMESTAMP('%s', CAST(period AS STRING)) AS min_timestamp,
-      PARSE_TIMESTAMP('%s', CAST(period + 600 AS STRING)) AS max_timestamp,
+      PARSE_TIMESTAMP('%s', CAST(period + {period_sampling_seconds} AS STRING)) AS max_timestamp,
       source,
       status,
       normalized_channel AS channel,
@@ -68,6 +68,7 @@ async def fetch_remotesettings_uptake(
     channels: List[str],
     sources: List[str],
     period_hours: int,
+    period_sampling_seconds: int,
     min_version: Optional[tuple],
 ):
     version_condition = (
@@ -82,6 +83,7 @@
     return await fetch_bigquery(
         EVENTS_TELEMETRY_QUERY.format(
             period_hours=period_hours,
+            period_sampling_seconds=period_sampling_seconds,
             source_condition=source_condition,
             version_condition=version_condition,
             channel_condition=channel_condition,
@@ -113,6 +115,7 @@ async def run(
     ignore_status: List[str] = [],
     ignore_versions: List[int] = [],
     period_hours: int = 4,
+    period_sampling_seconds: int = 600,
     include_legacy_versions: bool = False,
 ) -> CheckResult:
     min_version = await current_firefox_esr() if not include_legacy_versions else None
@@ -121,6 +124,7 @@
         sources=sources,
         channels=channels,
         period_hours=period_hours,
+        period_sampling_seconds=period_sampling_seconds,
         min_version=min_version,
     )
 
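
The docstrings above describe what both error-rate checks do with these rows: group events per period (and per source), then compare each group's error rate to the configured maximum. That aggregation is untouched by this commit; the sketch below is a simplified stand-in, not the repository's implementation, and it naively counts every non-"success" status as an error, only to show how rows bucketed by period feed an error-rate computation:

from collections import defaultdict
from typing import Dict, List

def error_rate_per_period(rows: List[dict], max_error_percentage: float) -> Dict[int, float]:
    # Each row is assumed (for this sketch) to carry a period identifier, a status,
    # and a total event count, e.g. {"period": 1725971400, "status": "network_error", "total": 42}.
    totals: Dict[int, int] = defaultdict(int)
    errors: Dict[int, int] = defaultdict(int)
    for row in rows:
        totals[row["period"]] += row["total"]
        if row["status"] != "success":  # crude stand-in for the real status classification
            errors[row["period"]] += row["total"]
    rates = {period: 100.0 * errors[period] / totals[period] for period in totals}
    # Only periods whose error rate exceeds the configured maximum are reported.
    return {period: rate for period, rate in rates.items() if rate > max_error_percentage}
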
checks/remotesettings/uptake_max_age.py (4 changes: 3 additions & 1 deletion)
@@ -24,7 +24,7 @@
       timestamp AS submission_timestamp,
       normalized_channel AS channel,
       -- Periods of 10min
-      UNIX_SECONDS(timestamp) - MOD(UNIX_SECONDS(timestamp), 600) AS period,
+      UNIX_SECONDS(timestamp) - MOD(UNIX_SECONDS(timestamp), {period_sampling_seconds}) AS period,
       SAFE_CAST(`moz-fx-data-shared-prod`.udf.get_key(event_map_values, "age") AS INT64) AS age
     FROM
       `moz-fx-data-shared-prod.telemetry_derived.events_live`
@@ -67,6 +67,7 @@ async def run(
     max_percentiles: Dict[str, int],
     channels: List[str] = ["release"],
     period_hours: int = 6,
+    period_sampling_seconds: int = 600,
     include_legacy_versions: bool = False,
 ) -> CheckResult:
     version_condition = ""
@@ -79,6 +80,7 @@
     rows = await fetch_bigquery(
         EVENTS_TELEMETRY_QUERY.format(
             period_hours=period_hours,
+            period_sampling_seconds=period_sampling_seconds,
             channel_condition=channel_condition,
             version_condition=version_condition,
         )
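
All three checks fill the new placeholder the same way: the BigQuery template is rendered with str.format, and period_sampling_seconds is passed alongside the existing period_hours and condition placeholders, defaulting to the former hard-coded 600 seconds. A minimal sketch with a made-up, heavily trimmed template (not the real EVENTS_TELEMETRY_QUERY):

# Hypothetical, trimmed-down template; the real queries are the ones shown in the diffs above.
TEMPLATE = """
SELECT
  UNIX_SECONDS(timestamp) - MOD(UNIX_SECONDS(timestamp), {period_sampling_seconds}) AS period
FROM events
WHERE timestamp > TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL {period_hours} HOUR)
{channel_condition}
"""

query = TEMPLATE.format(
    period_hours=6,
    period_sampling_seconds=600,  # the former hard-coded value is now just the default
    channel_condition="",         # empty when no channel filter is requested
)
print(query)
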