Remove unleash flag to disable checking ingress columns
myersCody committed Nov 26, 2024
1 parent 452ea23 commit 2395c06
Showing 3 changed files with 8 additions and 41 deletions.
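
In the code removed below, the required-column validation for ingress reports sat behind an "if not check_ingress_columns(schema)" guard, so enabling the cost-management.backend.check-ingress-columns Unleash flag for a customer schema silently skipped the validation. This commit deletes the helper and the guard so the validation always runs. The following is a minimal, self-contained sketch of the before/after control flow; the flag lookup, column check, schema name, and column names are stand-ins invented for illustration, and only the shape of the logic mirrors the diffs:

# Self-contained sketch; check_flag() and missing_required_columns() stand in for the
# removed check_ingress_columns() and for post_processor.check_ingress_required_columns().
REQUIRED = {"usage_start", "usage_end", "cost"}  # illustrative column names only


def check_flag(schema: str) -> bool:
    return False  # pretend the Unleash flag is off for this schema


def missing_required_columns(col_names) -> set:
    return REQUIRED - set(col_names)


def validate_before(schema: str, col_names) -> None:
    # Old behavior: an enabled flag made the whole validation a no-op.
    if not check_flag(schema):
        if missing := missing_required_columns(col_names):
            raise ValueError(f"missing required columns: {missing}")


def validate_after(schema: str, col_names) -> None:
    # New behavior: missing columns always raise; no flag is consulted.
    if missing := missing_required_columns(col_names):
        raise ValueError(f"missing required columns: {missing}")


validate_after("org1234567", ["usage_start", "usage_end", "cost"])  # passes: nothing missing
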
koku/masu/processor/__init__.py (7 changes: 0 additions & 7 deletions)
@@ -162,13 +162,6 @@ def get_customer_group_by_limit(account: str) -> int: # pragma: no cover
     return limit
 
 
-def check_ingress_columns(account): # pragma: no cover
-    """Should check ingress columns."""
-    account = convert_account(account)
-    context = {"schema": account}
-    return UNLEASH_CLIENT.is_enabled("cost-management.backend.check-ingress-columns", context)
-
-
 def is_feature_unattributed_storage_enabled_aws(account):
     """Should unattributed storage feature be enabled."""
     unleash_flag = "cost-management.backend.unattributed_storage.aws"
koku/masu/processor/parquet/parquet_report_processor.py (14 changes: 6 additions & 8 deletions)
@@ -20,7 +20,6 @@
 from api.utils import DateHelper
 from masu.config import Config
 from masu.database.report_manifest_db_accessor import ReportManifestDBAccessor
-from masu.processor import check_ingress_columns
 from masu.processor.aws.aws_report_parquet_processor import AWSReportParquetProcessor
 from masu.processor.azure.azure_report_parquet_processor import AzureReportParquetProcessor
 from masu.processor.gcp.gcp_report_parquet_processor import GCPReportParquetProcessor
@@ -521,13 +520,12 @@ def create_parquet_table(self, parquet_file, daily=False, partition_map=None):
 
     def check_required_columns_for_ingress_reports(self, col_names):
         LOG.info(log_json(msg="checking required columns for ingress reports", context=self._context))
-        if not check_ingress_columns(self.schema_name):
-            if missing_cols := self.post_processor.check_ingress_required_columns(col_names):
-                message = f"Unable to process file(s) due to missing required columns: {missing_cols}."
-                with schema_context(self.schema_name):
-                    report = IngressReports.objects.get(uuid=self.ingress_reports_uuid)
-                    report.set_status(message)
-                raise ValidationError(message, code="Missing_columns")
+        if missing_cols := self.post_processor.check_ingress_required_columns(col_names):
+            message = f"Unable to process file(s) due to missing required columns: {missing_cols}."
+            with schema_context(self.schema_name):
+                report = IngressReports.objects.get(uuid=self.ingress_reports_uuid)
+                report.set_status(message)
+            raise ValidationError(message, code="Missing_columns")
 
     def convert_csv_to_parquet(self, csv_filename: os.PathLike): # noqa: C901
         """Convert CSV file to parquet and send to S3."""
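
One detail worth noting in the simplified method above: the failure message is written to the IngressReports row via report.set_status(message) before the ValidationError is raised, which is what the updated tests assert after refresh_from_db(). Below is a rough, stand-alone sketch of that persist-then-raise pattern; the model class is a plain stub rather than the Django model, and the exception type is simplified:

class FakeIngressReport:
    """Stand-in for the IngressReports model; the real code saves status via the ORM."""

    def __init__(self):
        self.status = "pending"

    def set_status(self, message: str) -> None:
        self.status = message


def fail_ingress_report(report: FakeIngressReport, missing_cols: set) -> None:
    # Mirrors the new method body: persist the reason first, then raise so the
    # caller aborts processing while the stored report still explains the failure.
    message = f"Unable to process file(s) due to missing required columns: {missing_cols}."
    report.set_status(message)
    raise ValueError(message)  # the real code raises ValidationError(..., code="Missing_columns")


report = FakeIngressReport()
try:
    fail_ingress_report(report, {"usage_start"})
except ValueError:
    assert "missing required columns" in report.status  # the same thing the tests below check
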
koku/masu/test/processor/parquet/test_parquet_report_processor.py (28 changes: 2 additions & 26 deletions)
@@ -794,10 +794,8 @@ def test_get_metadata_kv_notocp(self, mock_stats):
         result = report_processor.get_metadata_kv(filename.stem)
         self.assertTupleEqual(result, expected_result)
 
-    @patch("masu.processor.parquet.parquet_report_processor.check_ingress_columns")
     @patch("masu.processor._tasks.process.CostUsageReportStatus.objects")
-    def test_check_required_columns_for_ingress_reports_validation_error(self, mock_stats, mock_check_cols):
-        mock_check_cols.return_value = False
+    def test_check_required_columns_for_ingress_reports_validation_error(self, mock_stats):
         filename = Path("pod_usage.count.csv")
         with schema_context(self.schema):
             ingress_report = IngressReports(
@@ -828,10 +826,8 @@ def test_check_required_columns_for_ingress_reports_validation_error(self, mock_
             ingress_report.refresh_from_db()
             self.assertIn("missing required columns", ingress_report.status)
 
-    @patch("masu.processor.parquet.parquet_report_processor.check_ingress_columns")
     @patch("masu.processor._tasks.process.CostUsageReportStatus.objects")
-    def test_check_required_columns_for_ingress_reports(self, mock_stats, mock_check_cols):
-        mock_check_cols.return_value = False
+    def test_check_required_columns_for_ingress_reports(self, mock_stats):
         filename = Path("pod_usage.count.csv")
         with schema_context(self.schema):
             ingress_report = IngressReports(
@@ -858,23 +854,3 @@ def test_check_required_columns_for_ingress_reports(self, mock_stats, mock_check
             )
         result = report_processor.check_required_columns_for_ingress_reports(RECOMMENDED_COLUMNS)
         self.assertIsNone(result)
-
-    @patch("masu.processor.parquet.parquet_report_processor.check_ingress_columns")
-    @patch("masu.processor._tasks.process.CostUsageReportStatus.objects")
-    def test_check_required_columns_for_ingress_reports_disabled_check(self, mock_stats, mock_check_cols):
-        mock_check_cols.return_value = True
-        filename = Path("pod_usage.count.csv")
-        report_processor = ParquetReportProcessor(
-            schema_name=self.schema,
-            report_path=filename,
-            provider_uuid=self.aws_provider_uuid,
-            provider_type=Provider.PROVIDER_AWS,
-            manifest_id=self.manifest_id,
-            context={
-                "tracing_id": self.tracing_id,
-                "start_date": self.today,
-            },
-            ingress_reports_uuid=None,
-        )
-        result = report_processor.check_required_columns_for_ingress_reports(RECOMMENDED_COLUMNS)
-        self.assertIsNone(result)
