From e26ee3910f6e6f6c536e2c6d88b11206a7c8394c Mon Sep 17 00:00:00 2001
From: Tyler Burton
Date: Mon, 22 Jul 2024 17:12:07 -0500
Subject: [PATCH] add logger and update action version

---
 .github/workflows/build-metrics-report.yml | 2 +-
 metrics/datagov_metrics/ga.py              | 5 ++++-
 metrics/datagov_metrics/s3_util.py         | 6 +++---
 3 files changed, 8 insertions(+), 5 deletions(-)

diff --git a/.github/workflows/build-metrics-report.yml b/.github/workflows/build-metrics-report.yml
index 2d2891788..8393fcf80 100644
--- a/.github/workflows/build-metrics-report.yml
+++ b/.github/workflows/build-metrics-report.yml
@@ -35,7 +35,7 @@ jobs:
         run: |
           poetry config virtualenvs.create true --local
           poetry config virtualenvs.in-project true --local
-      - uses: actions/cache@v3
+      - uses: actions/cache@v4
         name: Define a cache for the virtual environment based on the dependencies lock file
         with:
           path: ./.venv
diff --git a/metrics/datagov_metrics/ga.py b/metrics/datagov_metrics/ga.py
index c06002228..95c95de9b 100644
--- a/metrics/datagov_metrics/ga.py
+++ b/metrics/datagov_metrics/ga.py
@@ -1,12 +1,15 @@
 import datetime
 import io
 import csv
+import logging
 
 from datagov_metrics.s3_util import put_data_to_s3
 import requests
 from google.oauth2 import service_account
 from googleapiclient.discovery import build
 
+log = logging.getLogger("datagov_metrics.ga")
+
 KEY_FILE_LOCATION = "datagov_metrics/credentials.json"
 GA4_PROPERTY_ID = "properties/381392243"
 
@@ -213,7 +216,7 @@ def write_data_to_csv(response):
 def main():
     reports = setup_reports()
     for report in reports:
-        print(f"Fetching report: {report}")
+        log.info(f"Fetching report: {report}")
         fetched_report = fetch_report(reports[report])
         csv_data = write_data_to_csv(fetched_report)
         put_data_to_s3(f"{report}.csv", csv_data)
diff --git a/metrics/datagov_metrics/s3_util.py b/metrics/datagov_metrics/s3_util.py
index b5bd88b79..dce6ec43d 100644
--- a/metrics/datagov_metrics/s3_util.py
+++ b/metrics/datagov_metrics/s3_util.py
@@ -9,7 +9,7 @@
 AWS_ACCESS_KEY_ID = os.getenv("AWS_ACCESS_KEY_ID_METRICS")
 AWS_SECRET_ACCESS_KEY = os.getenv("AWS_SECRET_ACCESS_KEY_METRICS")
 
-boto3.set_stream_logger("", logging.INFO)
+log = logging.getLogger("datagov_metrics.s3util")
 
 s3_client = boto3.client(
     "s3",
@@ -24,6 +24,6 @@ def put_data_to_s3(file_name, csv_data):
 
     status = response.get("ResponseMetadata", {}).get("HTTPStatusCode")
     if status == 200:
-        print(f"Successful S3 put_object {file_name} response. Status - {status}")
+        log.info(f"Successful S3 put_object {file_name} response. Status - {status}")
     else:
-        print(f"Unsuccessful S3 put_object {file_name} response. Status - {status}")
+        log.error(f"Unsuccessful S3 put_object {file_name} response. Status - {status}")
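
Note: the patch swaps print() and boto3's global stream logger for the module-level loggers "datagov_metrics.ga" and "datagov_metrics.s3util", so the new log.info() messages only appear if whatever invokes main() configures a handler (Python's last-resort handler emits WARNING and above, so INFO records are otherwise dropped). A minimal sketch of such a caller, assuming the main() entry point shown in the patch; the wrapper script and basicConfig call are illustrative assumptions, not part of this change:

# Sketch only: one way a runner might surface the new module loggers.
# The basicConfig call and this wrapper are assumptions, not part of the patch.
import logging

from datagov_metrics import ga  # module patched above; exposes main()

if __name__ == "__main__":
    # Attach a stream handler at INFO so records from "datagov_metrics.ga"
    # and "datagov_metrics.s3util" (log.info/log.error above) are emitted.
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s %(name)s %(levelname)s %(message)s",
    )
    ga.main()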