Commit
reconfig config
maskarb committed Dec 11, 2024
1 parent 5653d45 commit d8bc7a3
Showing 8 changed files with 70 additions and 37 deletions.
9 changes: 9 additions & 0 deletions deploy/clowdapp.yaml
@@ -5639,6 +5639,15 @@ objects:
kind: Secret
metadata:
name: koku-secret
- apiVersion: v1
data:
aws-credentials: ${AWS_CREDENTIALS_EPH}
stringData:
aws-access-key-id: ${AWS_ACCESS_KEY_ID_EPH}
aws-secret-access-key: ${AWS_SECRET_ACCESS_KEY_EPH}
kind: Secret
metadata:
name: koku-aws
- apiVersion: v1
data:
gcp-credentials: ${GCP_CREDENTIALS_EPH}
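The new koku-aws Secret publishes the ephemeral AWS credentials in two forms: a base64-encoded data entry and plaintext stringData entries, which the Kubernetes API server base64-encodes and merges into data on write. A minimal sketch of producing the AWS_CREDENTIALS_EPH parameter value follows; the shared-credentials-file payload is an assumption for illustration, not a format the commit specifies.

import base64

# Hypothetical: build the base64 value expected by the Secret's `data`
# field. Only the base64 requirement is Kubernetes-mandated; the payload
# layout below is assumed.
credentials_file = (
    "[default]\n"
    "aws_access_key_id = <key-id>\n"
    "aws_secret_access_key = <secret>\n"
)
print(base64.b64encode(credentials_file.encode()).decode())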
15 changes: 9 additions & 6 deletions deploy/kustomize/base/base.yaml
@@ -157,12 +157,15 @@ objects:
name: koku-secret
data:
django-secret-key: "${SECRET_KEY}"
# - apiVersion: v1
# kind: Secret # For ephemeral/local environment only
# metadata:
# name: koku-aws
# data:
# aws-credentials: "${AWS_CREDENTIALS_EPH}"
- apiVersion: v1
kind: Secret
metadata:
name: koku-aws
data:
aws-credentials: ${AWS_CREDENTIALS_EPH}
stringData:
aws-access-key-id: ${AWS_ACCESS_KEY_ID_EPH}
aws-secret-access-key: ${AWS_SECRET_ACCESS_KEY_EPH}
- apiVersion: v1
kind: Secret # For ephemeral/local environment only
metadata:
21 changes: 11 additions & 10 deletions dev/scripts/clean_glue.py
@@ -15,24 +15,25 @@ def clear_glue_data():
"aws_access_key_id": os.environ.get("TRINO_AWS_ACCESS_KEY_ID"),
"aws_secret_access_key": os.environ.get("TRINO_AWS_SECRET_ACCESS_KEY"),
}

path_prefixes = {
"s3_csv_path": f"data/csv/{schema}",
"s3_parquet_path": f"data/parquet/{schema}",
"s3_daily_parquet": f"data/parquet/daily/{schema}",
}

for _, file_prefix in path_prefixes.items():
s3_resource = boto3.resource("s3", **credentials)
s3_bucket = s3_resource.Bucket(bucket_name)
object_keys = [s3_object.key for s3_object in s3_bucket.objects.filter(Prefix=file_prefix)]
for key in object_keys:
s3_resource.Object(bucket_name, key).delete()
print(f"Removing s3 files for prefix: {file_prefix}")
s3_client = boto3.client("s3", **credentials)
paginator = s3_client.get_paginator("list_objects_v2")
for obj_list in paginator.paginate(Bucket=bucket_name, Prefix=file_prefix):
if "Contents" in obj_list:
keys = [{"Key": x["Key"]} for x in obj_list["Contents"]]
s3_client.delete_objects(Bucket=bucket_name, Delete={"Objects": keys})
print(f"Removed s3 files for prefix: {file_prefix}")

client = boto3.client("glue", **credentials)
glue_client = boto3.client("glue", **credentials)
try:
client.delete_database(Name=schema)
print(f"Deleteing database: {schema}")
glue_client.delete_database(Name=schema)
print(f"Deleting database: {schema}")
except Exception:
print(f"Failed to delete db: {schema}, its possible it was already deleted")

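The cleanup script now lists keys with the list_objects_v2 paginator and deletes them in batches via delete_objects, replacing one DELETE call per object with one call per page of up to 1,000 keys. A self-contained sketch of the same pattern, assuming default credentials; the per-key error check is an extension beyond what the commit does:

import boto3

def delete_prefix(bucket: str, prefix: str) -> None:
    """Batch-delete every object under a prefix (sketch of clean_glue.py)."""
    s3 = boto3.client("s3")
    paginator = s3.get_paginator("list_objects_v2")
    for page in paginator.paginate(Bucket=bucket, Prefix=prefix):
        # Each page holds at most 1,000 keys, matching the 1,000-key limit
        # on delete_objects, so one page maps onto one delete call.
        if keys := [{"Key": obj["Key"]} for obj in page.get("Contents", [])]:
            response = s3.delete_objects(Bucket=bucket, Delete={"Objects": keys})
            # delete_objects reports per-key failures in its response body
            # instead of raising, so surface them explicitly.
            for error in response.get("Errors", []):
                print(f"failed to delete {error['Key']}: {error['Message']}")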
7 changes: 4 additions & 3 deletions koku/koku/settings.py
@@ -27,11 +27,9 @@
from oci.exceptions import ConfigFileNotFound

from . import database
from . import sentry
from .configurator import CONFIGURATOR
from .env import ENVIRONMENT


# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases

@@ -493,20 +491,23 @@
REQUESTED_ROS_BUCKET = ENVIRONMENT.get_value("REQUESTED_ROS_BUCKET", default="ros-report")
REQUESTED_SUBS_BUCKET = ENVIRONMENT.get_value("REQUESTED_SUBS_BUCKET", default="subs-report")
S3_TIMEOUT = ENVIRONMENT.int("S3_CONNECTION_TIMEOUT", default=60)
S3_ENDPOINT = CONFIGURATOR.get_object_store_endpoint()
S3_DEFAULT_ENDPOINT = ENVIRONMENT.get_value("S3_ENDPOINT", default="https://s3.amazonaws.com")
S3_REGION = ENVIRONMENT.get_value("S3_REGION", default="us-east-1")
S3_BUCKET_PATH = ENVIRONMENT.get_value("S3_BUCKET_PATH", default="data_archive")
S3_BUCKET_NAME = CONFIGURATOR.get_object_store_bucket(REQUESTED_BUCKET)
S3_ACCESS_KEY = CONFIGURATOR.get_object_store_access_key(REQUESTED_BUCKET)
S3_SECRET = CONFIGURATOR.get_object_store_secret_key(REQUESTED_BUCKET)
S3_HCS_ENDPOINT = CONFIGURATOR.get_object_store_endpoint()
S3_ROS_BUCKET_NAME = CONFIGURATOR.get_object_store_bucket(REQUESTED_ROS_BUCKET)
S3_ROS_ACCESS_KEY = CONFIGURATOR.get_object_store_access_key(REQUESTED_ROS_BUCKET)
S3_ROS_SECRET = CONFIGURATOR.get_object_store_secret_key(REQUESTED_ROS_BUCKET)
S3_ROS_REGION = CONFIGURATOR.get_object_store_region(REQUESTED_ROS_BUCKET)
S3_ROS_ENDPOINT = CONFIGURATOR.get_object_store_endpoint()
S3_SUBS_BUCKET_NAME = CONFIGURATOR.get_object_store_bucket(REQUESTED_SUBS_BUCKET)
S3_SUBS_ACCESS_KEY = CONFIGURATOR.get_object_store_access_key(REQUESTED_SUBS_BUCKET)
S3_SUBS_SECRET = CONFIGURATOR.get_object_store_secret_key(REQUESTED_SUBS_BUCKET)
S3_SUBS_REGION = CONFIGURATOR.get_object_store_region(REQUESTED_SUBS_BUCKET)
S3_SUBS_ENDPOINT = CONFIGURATOR.get_object_store_endpoint()
SKIP_MINIO_DATA_DELETION = ENVIRONMENT.bool("SKIP_MINIO_DATA_DELETION", default=False)

ENABLE_S3_ARCHIVING = ENVIRONMENT.bool("ENABLE_S3_ARCHIVING", default=False)
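settings.py now distinguishes a default endpoint from per-consumer ones: S3_DEFAULT_ENDPOINT comes from the S3_ENDPOINT environment variable (falling back to the public AWS endpoint), while S3_HCS_ENDPOINT, S3_ROS_ENDPOINT, and S3_SUBS_ENDPOINT are all resolved through CONFIGURATOR.get_object_store_endpoint(). A sketch of how a per-service client is built from these settings; this mirrors get_ros_s3_client() in ros_report_shipper.py below rather than adding anything new:

import boto3
from django.conf import settings

# Each service-specific client is now bound to its own endpoint setting
# instead of the formerly shared S3_ENDPOINT.
ros_client = boto3.client(
    "s3",
    aws_access_key_id=settings.S3_ROS_ACCESS_KEY,
    aws_secret_access_key=settings.S3_ROS_SECRET,
    region_name=settings.S3_ROS_REGION,
    endpoint_url=settings.S3_ROS_ENDPOINT,
)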
3 changes: 1 addition & 2 deletions koku/masu/external/ros_report_shipper.py
@@ -22,7 +22,6 @@
from masu.prometheus_stats import KAFKA_CONNECTION_ERRORS_COUNTER
from masu.util.ocp import common as utils


LOG = logging.getLogger(__name__)


@@ -34,7 +33,7 @@ def get_ros_s3_client(): # pragma: no cover
aws_secret_access_key=settings.S3_ROS_SECRET,
region_name=settings.S3_ROS_REGION,
)
return s3_session.client("s3", endpoint_url=settings.S3_ENDPOINT, config=config)
return s3_session.client("s3", endpoint_url=settings.S3_ROS_ENDPOINT, config=config)


def generate_s3_object_url(client, upload_key): # pragma: no cover
40 changes: 28 additions & 12 deletions koku/masu/util/aws/common.py
@@ -9,7 +9,6 @@
import math
import re
import time
import typing as t
import uuid
from itertools import chain

@@ -464,8 +463,8 @@ def update_account_aliases(provider: Provider):
def get_bills_from_provider(
provider_uuid: str,
schema: str,
start_date: t.Union[datetime.datetime, str] = None,
end_date: t.Union[datetime.datetime, str] = None,
start_date: datetime.datetime | str = None,
end_date: datetime.datetime | str = None,
) -> list[AWSCostEntryBill]:
"""
Return the AWS bill IDs given a provider UUID.
@@ -512,7 +511,9 @@ def get_bills_from_provider(
return bills


def get_s3_resource(access_key=None, secret_key=None, region=None, profile_name=None): # pragma: no cover
def get_s3_resource(
*, access_key=None, secret_key=None, region=None, profile_name=None, endpoint_url=settings.S3_DEFAULT_ENDPOINT
): # pragma: no cover
"""
Obtain the s3 session client
"""
@@ -523,10 +524,10 @@ def get_s3_resource(access_key=None, secret_key=None, region=None, profile_name=
region_name=region,
profile_name=profile_name,
)
return aws_session.resource("s3", endpoint_url=settings.S3_ENDPOINT, config=config)
return aws_session.resource("s3", endpoint_url=endpoint_url, config=config)


def copy_data_to_s3_bucket(request_id, path, filename, data, metadata=None, context=None):
def copy_data_to_s3_bucket(request_id, path, filename, data, metadata=None, context=None, s3_resource=None):
"""
Copies data to s3 bucket file
"""
@@ -536,7 +537,8 @@ def copy_data_to_s3_bucket(request_id, path, filename, data, metadata=None, cont
extra_args = {}
if metadata:
extra_args["Metadata"] = metadata
s3_resource = get_s3_resource(profile_name="default")
if s3_resource is None:
s3_resource = get_s3_resource(profile_name="default")
s3_obj = {"bucket_name": settings.S3_BUCKET_NAME, "key": upload_key}
upload = s3_resource.Object(**s3_obj)
try:
@@ -564,18 +566,32 @@ def copy_local_report_file_to_s3_bucket(


def copy_local_hcs_report_file_to_s3_bucket(
request_id, s3_path, full_file_path, local_filename, finalize=False, finalize_date=None, context={}
request_id,
s3_path,
full_file_path,
local_filename,
finalize=False,
finalize_date=None,
context=None,
):
"""
Copies local report file to s3 bucket
"""
if context is None:
context = {}
if s3_path and settings.ENABLE_S3_ARCHIVING:
LOG.info(f"copy_local_HCS_report_file_to_s3_bucket: {s3_path} {full_file_path}")
s3_resource = get_s3_resource(
access_key=settings.S3_ACCESS_KEY,
secret_key=settings.S3_SECRET,
region=settings.S3_REGION,
endpoint_url=settings.S3_HCS_ENDPOINT,
)
with open(full_file_path, "rb") as fin:
metadata = {"finalized": str(finalize)}
if finalize and finalize_date:
metadata["finalized-date"] = finalize_date
copy_data_to_s3_bucket(request_id, s3_path, local_filename, fin, metadata, context)
copy_data_to_s3_bucket(request_id, s3_path, local_filename, fin, metadata, context, s3_resource)


def _get_s3_objects(s3_path):
@@ -656,11 +672,11 @@ def safe_str_int_conversion(value):

def filter_s3_objects_less_than(
request_id: str,
keys: t.List[str],
keys: list[str],
metadata_key: str,
metadata_value_check: str,
context: t.Optional[t.Dict] = None,
) -> t.List[str]:
context: dict | None = None,
) -> list[str]:
"""Filter S3 object keys based on a metadata key integer value comparison.
Parameters:
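Two behavioral details in this file are easy to miss. First, the bare * in get_s3_resource makes every argument keyword-only, so old positional call sites (updated in the subs modules below) now fail fast. Second, copy_local_hcs_report_file_to_s3_bucket swaps its mutable context={} default for context=None. A usage sketch of the new signature, assuming the settings import already present in this module:

from django.conf import settings

# Keyword-only call; endpoint_url falls back to settings.S3_DEFAULT_ENDPOINT.
s3 = get_s3_resource(
    access_key=settings.S3_ACCESS_KEY,
    secret_key=settings.S3_SECRET,
    region=settings.S3_REGION,
)

# Positional calls are rejected at call time:
# get_s3_resource(settings.S3_ACCESS_KEY)
# TypeError: get_s3_resource() takes 0 positional arguments but 1 was given

One caveat: the endpoint_url=settings.S3_DEFAULT_ENDPOINT default is evaluated once at import time, so reconfiguring that setting afterwards (e.g. in tests) will not change the default.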
6 changes: 4 additions & 2 deletions koku/subs/subs_data_extractor.py
@@ -22,7 +22,6 @@
from reporting.models import SubsIDMap
from reporting.models import SubsLastProcessed


LOG = logging.getLogger(__name__)


@@ -40,7 +39,10 @@ def __init__(self, tracing_id, context):
self.creation_processing_time = self.creation_processing_time - timedelta(days=1)
self.tracing_id = tracing_id
self.s3_resource = get_s3_resource(
settings.S3_SUBS_ACCESS_KEY, settings.S3_SUBS_SECRET, settings.S3_SUBS_REGION
access_key=settings.S3_SUBS_ACCESS_KEY,
secret_key=settings.S3_SUBS_SECRET,
region=settings.S3_SUBS_REGION,
endpoint_url=settings.S3_SUBS_ENDPOINT,
)
self.context = context

6 changes: 4 additions & 2 deletions koku/subs/subs_data_messenger.py
@@ -25,7 +25,6 @@
from masu.prometheus_stats import RHEL_ELS_VCPU_HOURS
from masu.util.aws.common import get_s3_resource


LOG = logging.getLogger(__name__)

HPC_ROLE = "Red Hat Enterprise Linux Compute Node"
@@ -51,7 +50,10 @@ def __init__(self, context, schema_name, tracing_id):
self.tracing_id = tracing_id
self.schema_name = schema_name
self.s3_resource = get_s3_resource(
settings.S3_SUBS_ACCESS_KEY, settings.S3_SUBS_SECRET, settings.S3_SUBS_REGION
access_key=settings.S3_SUBS_ACCESS_KEY,
secret_key=settings.S3_SUBS_SECRET,
region=settings.S3_SUBS_REGION,
endpoint_url=settings.S3_SUBS_ENDPOINT,
)
subs_cust = Customer.objects.get(schema_name=schema_name)
self.account_id = subs_cust.account_id
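Both SUBS constructors now name their arguments and pin endpoint_url to S3_SUBS_ENDPOINT, and copy_data_to_s3_bucket gains an optional s3_resource so a caller can supply a resource bound to a non-default endpoint while every other call site keeps the default-profile resource. A hypothetical wrapper showing the passthrough as copy_local_hcs_report_file_to_s3_bucket uses it (upload_hcs_report is illustrative, not part of the commit):

from django.conf import settings
from masu.util.aws.common import copy_data_to_s3_bucket, get_s3_resource

def upload_hcs_report(request_id, s3_path, filename, data):
    # Build a resource bound to the HCS endpoint once, then hand it in so
    # copy_data_to_s3_bucket skips its default-profile resource.
    resource = get_s3_resource(
        access_key=settings.S3_ACCESS_KEY,
        secret_key=settings.S3_SECRET,
        region=settings.S3_REGION,
        endpoint_url=settings.S3_HCS_ENDPOINT,
    )
    copy_data_to_s3_bucket(request_id, s3_path, filename, data, s3_resource=resource)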
