Skip to content

Commit

Permalink
[COST-5768] resource verification
Browse files Browse the repository at this point in the history
  • Loading branch information
myersCody committed Dec 9, 2024
1 parent c56449c commit 1b2994a
Show file tree
Hide file tree
Showing 3 changed files with 76 additions and 8 deletions.
18 changes: 10 additions & 8 deletions koku/masu/database/gcp_report_db_accessor.py
Original file line number Diff line number Diff line change
def verify_populate_ocp_on_cloud_daily_trino(self, verification_params):
    """Verify the managed trino table population went successfully.

    Runs each verification SQL file against Trino. Every file must come back
    with an all-true verification row for the population to be considered
    verified; the first failing file short-circuits with an error log.

    Args:
        verification_params (dict): bind parameters for the verification SQL;
            also echoed into the structured log records.

    Returns:
        None. The outcome is reported via LOG (info on success, error on failure).
    """
    for sql_file in ("managed_ocp_on_gcp_verification.sql", "managed_resources.sql"):
        verify_path = f"trino_sql/verify/gcp/{sql_file}"
        LOG.info(log_json(msg="running verification for managed OCP on GCP daily SQL", file=sql_file))
        verification_sql = pkgutil.get_data("masu.database", verify_path).decode("utf-8")
        result = self._execute_trino_multipart_sql_query(verification_sql, bind_params=verification_params)
        # result[0] is the first result row, a list of booleans. A bare
        # truthiness check (`if result[0]:`) is always True for any non-empty
        # row — even `[False]` — so explicitly look for a False flag.
        if not result or False in result[0]:
            LOG.error(log_json(msg="Verification failed", **verification_params))
            return
    LOG.info(log_json(msg="Verification successful", **verification_params))

def populate_ocp_on_cloud_daily_trino(
self, gcp_provider_uuid, openshift_provider_uuid, start_date, end_date, matched_tags
Expand Down
66 changes: 66 additions & 0 deletions koku/masu/database/trino_sql/verify/gcp/managed_resources.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
-- Verification query for managed OCP-on-GCP daily data (Trino, jinja-templated).
-- Produces one boolean column `counts_match`:
--   TRUE  -> every per-resource/per-day cost discrepancy between the parquet
--            table and the managed table is a known parquet-side issue (the
--            managed cost agrees with the raw line items)
--   FALSE -> at least one discrepancy is NOT attributable to the parquet table
WITH cte_agg_tags AS (
-- Matched OpenShift tag strings, injected by the caller as a Trino ARRAY literal.
SELECT ARRAY{{matched_tag_array | sqlsafe}} AS matched_tags
),
cte_resource_breakdown AS (
-- Per-resource, per-day matched cost from both candidate tables, labeled with
-- source_type ('parquet' vs 'managed') so the two can be compared row-for-row.
SELECT
source_type,
resource_name,
usage_start_time,
SUM(cost) AS cost
FROM (
-- Parquet side: rows matched either directly (ocp_matched) or via a matched
-- tag substring found in the row's labels.
SELECT 'parquet' AS source_type, resource_name, usage_start_time, cost
FROM hive.{{schema | sqlsafe}}.gcp_openshift_daily parquet_table
WHERE source = {{cloud_source_uuid}}
AND year = {{year}} AND month = {{month}}
AND (ocp_matched = TRUE OR EXISTS (
SELECT 1
FROM cte_agg_tags
-- strpos != 0 means the matched tag string appears somewhere in labels.
WHERE any_match(matched_tags, x -> strpos(parquet_table.labels, x) != 0)
))
UNION ALL
-- Managed side: rows matched by resource id or by a pre-computed matched_tag.
SELECT 'managed' AS source_type, resource_name, usage_start_time, cost
FROM hive.{{schema | sqlsafe}}.managed_gcp_openshift_daily
WHERE source = {{cloud_source_uuid}}
AND year = {{year}} AND month = {{month}}
AND (resource_id_matched = TRUE OR matched_tag != '')
) aggregated_data
GROUP BY source_type, resource_name, usage_start_time
),
cte_discrepancies AS (
-- Self-join parquet rows to their managed counterpart; keep only rows whose
-- costs differ. A missing managed row counts as cost 0 via COALESCE, so
-- parquet-only resources also surface as discrepancies.
SELECT
p.resource_name,
p.usage_start_time,
p.cost AS parquet_cost,
m.cost AS managed_cost
FROM cte_resource_breakdown p
LEFT JOIN cte_resource_breakdown m
ON p.resource_name = m.resource_name
AND p.usage_start_time = m.usage_start_time
AND m.source_type = 'managed'
WHERE p.source_type = 'parquet'
AND (COALESCE(m.cost, 0) != p.cost)
),
cte_initial_cost_check AS (
-- Arbitrate each discrepancy against the raw line items: if the raw cost is
-- below the parquet cost but equal to the managed cost, the parquet table
-- (not the managed one) is the side with the issue.
SELECT
gcp.resource_name,
gcp.usage_start_time,
SUM(gcp.cost) AS initial_cost,
MAX(d.managed_cost) AS managed_cost,
MAX(d.parquet_cost) AS parquet_cost,
CASE
WHEN SUM(gcp.cost) < MAX(d.parquet_cost) AND SUM(gcp.cost) = MAX(d.managed_cost)
THEN TRUE ELSE FALSE
END AS parquet_issue
FROM hive.{{schema | sqlsafe}}.gcp_line_items_daily gcp
JOIN cte_discrepancies d
ON gcp.resource_name = d.resource_name
AND gcp.usage_start_time = d.usage_start_time
WHERE gcp.source = {{cloud_source_uuid}}
AND gcp.year = {{year}} AND gcp.month = {{month}}
GROUP BY gcp.resource_name, gcp.usage_start_time
)
-- Pass only when no discrepancy remains unexplained (i.e. every discrepancy,
-- if any, was flagged as a parquet_issue). An empty discrepancy set also
-- yields TRUE.
SELECT NOT EXISTS (
SELECT 1
FROM cte_initial_cost_check
WHERE parquet_issue = FALSE
) AS counts_match;

0 comments on commit 1b2994a

Please sign in to comment.