add logger and update action version
btylerburton committed Jul 22, 2024
1 parent 5638c44 commit e26ee39
Showing 3 changed files with 8 additions and 5 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/build-metrics-report.yml
@@ -35,7 +35,7 @@ jobs:
         run: |
           poetry config virtualenvs.create true --local
           poetry config virtualenvs.in-project true --local
-      - uses: actions/cache@v3
+      - uses: actions/cache@v4
         name: Define a cache for the virtual environment based on the dependencies lock file
         with:
           path: ./.venv
5 changes: 4 additions & 1 deletion metrics/datagov_metrics/ga.py
@@ -1,12 +1,15 @@
 import datetime
 import io
 import csv
+import logging
 
 from datagov_metrics.s3_util import put_data_to_s3
 import requests
 from google.oauth2 import service_account
 from googleapiclient.discovery import build
 
+log = logging.getLogger("datagov_metrics.ga")
+
 KEY_FILE_LOCATION = "datagov_metrics/credentials.json"
 GA4_PROPERTY_ID = "properties/381392243"
 
@@ -213,7 +216,7 @@ def write_data_to_csv(response):
 def main():
     reports = setup_reports()
     for report in reports:
-        print(f"Fetching report: {report}")
+        log.info(f"Fetching report: {report}")
         fetched_report = fetch_report(reports[report])
         csv_data = write_data_to_csv(fetched_report)
         put_data_to_s3(f"{report}.csv", csv_data)
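Note: the module-level logger added here (logging.getLogger("datagov_metrics.ga")) only produces visible output if a handler is configured somewhere in the process; with no handler, the new log.info(...) calls emit nothing. A minimal sketch of that wiring at the entry point, assuming main() is run as a script — this configuration is not part of the commit:

import logging

from datagov_metrics.ga import main

if __name__ == "__main__":
    # basicConfig attaches a stream handler to the root logger; the named
    # "datagov_metrics.ga" logger propagates its records up to it.
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s %(name)s %(levelname)s %(message)s",
    )
    main()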
6 changes: 3 additions & 3 deletions metrics/datagov_metrics/s3_util.py
@@ -9,7 +9,7 @@
 AWS_ACCESS_KEY_ID = os.getenv("AWS_ACCESS_KEY_ID_METRICS")
 AWS_SECRET_ACCESS_KEY = os.getenv("AWS_SECRET_ACCESS_KEY_METRICS")
 
-boto3.set_stream_logger("", logging.INFO)
+log = logging.getLogger("datagov_metrics.s3util")
 
 s3_client = boto3.client(
     "s3",
@@ -24,6 +24,6 @@ def put_data_to_s3(file_name, csv_data):
     status = response.get("ResponseMetadata", {}).get("HTTPStatusCode")
 
     if status == 200:
-        print(f"Successful S3 put_object {file_name} response. Status - {status}")
+        log.info(f"Successful S3 put_object {file_name} response. Status - {status}")
     else:
-        print(f"Unsuccessful S3 put_object {file_name} response. Status - {status}")
+        log.error(f"Unsuccessful S3 put_object {file_name} response. Status - {status}")
