Commit

Update S3 to JSON tests
philerooski committed Sep 7, 2023
1 parent a6c259c commit c4922d2
Showing 2 changed files with 17 additions and 2 deletions.
8 changes: 6 additions & 2 deletions src/glue/jobs/s3_to_json.py
@@ -19,8 +19,13 @@

DATA_TYPES_WITH_SUBTYPE = ["HealthKitV2Samples", "HealthKitV2Statistics"]

-logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)
+logger.setLevel(logging.DEBUG)
+handler = logging.StreamHandler(sys.stdout)
+handler.setLevel(logging.DEBUG)
+formatter = logging.Formatter("%(levelname)s:%(name)s:%(message)s")
+handler.setFormatter(formatter)
+logger.addHandler(handler)

def transform_object_to_array_of_objects(
    json_obj_to_replace: dict,
@@ -373,7 +378,6 @@ def write_file_to_json_dataset(
        current_file_size = os.path.getsize(current_output_path)
        if current_file_size > file_size_limit:
            part_number += 1
-            print(f"!!! File is too large, creating new part {part_number}")
            current_output_path = get_part_path(
                metadata=metadata,
                part_number=part_number,
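For context, here is a minimal, self-contained sketch of the pattern this commit switches to: the module-level logger configuration added above, paired with a hypothetical helper that stands in for the part-rotation check in write_file_to_json_dataset and calls logger.debug where the removed print statement used to be. The helper's name and body are illustrative only, not the repository's code.

# Sketch only: the logger setup mirrors the lines added in this commit; the
# helper function below is hypothetical and stands in for the part-rotation
# logic in write_file_to_json_dataset.
import logging
import sys

logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.DEBUG)
formatter = logging.Formatter("%(levelname)s:%(name)s:%(message)s")
handler.setFormatter(formatter)
logger.addHandler(handler)


def next_part_number(current_file_size: int, file_size_limit: int, part_number: int) -> int:
    """Hypothetical helper: bump the part number when the current file exceeds the limit."""
    if current_file_size > file_size_limit:
        part_number += 1
        # logger.debug takes the place of the removed print("!!! File is too large, ...") call.
        logger.debug("File is too large, creating new part %s", part_number)
    return part_number


if __name__ == "__main__":
    print(next_part_number(current_file_size=11, file_size_limit=10, part_number=1))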
11 changes: 11 additions & 0 deletions tests/test_s3_to_json.py
@@ -122,6 +122,7 @@ def test_transform_json_generic(self):
        transformed_json = s3_to_json.transform_json(
            json_obj={},
            dataset_identifier=sample_metadata["type"],
+            cohort="adults_v1",
            metadata=sample_metadata
        )

@@ -133,6 +134,7 @@ def test_transform_json_generic(self):
            sample_metadata["end_date"].isoformat()
            == transformed_json["export_end_date"]
        )
+        assert transformed_json["cohort"] == "adults_v1"

    def test_transform_json_with_subtype(self):
        sample_metadata = {
@@ -144,6 +146,7 @@ def test_transform_json_with_subtype(self):
        transformed_json = s3_to_json.transform_json(
            json_obj={},
            dataset_identifier=sample_metadata["type"],
+            cohort="adults_v1",
            metadata=sample_metadata
        )

@@ -167,6 +170,7 @@ def test_transform_json_symptom_log(self):
        transformed_json = s3_to_json.transform_json(
            json_obj={"Value": json.dumps(transformed_value)},
            dataset_identifier=sample_metadata["type"],
+            cohort="adults_v1",
            metadata=sample_metadata
        )

@@ -194,6 +198,7 @@ def test_transform_json_enrolled_participants_str(self):
                }
            },
            dataset_identifier=sample_metadata["type"],
+            cohort="adults_v1",
            metadata=sample_metadata
        )

@@ -225,6 +230,7 @@ def test_transform_json_enrolled_participants_malformatted_str(self):
                }
            },
            dataset_identifier=sample_metadata["type"],
+            cohort="adults_v1",
            metadata=sample_metadata
        )

@@ -266,6 +272,7 @@ def test_transform_json_garmin_one_level_down(self):
        transformed_json = s3_to_json.transform_json(
            json_obj={"TimeOffsetHeartRateSamples": time_offset_heartrate_samples},
            dataset_identifier=sample_metadata["type"],
+            cohort="adults_v1",
            metadata=sample_metadata
        )

@@ -322,6 +329,7 @@ def test_transform_json_garmin_two_levels_down(self):
                ]
            },
            dataset_identifier=sample_metadata["type"],
+            cohort="adults_v1",
            metadata=sample_metadata
        )

@@ -363,6 +371,7 @@ def test_transform_block_empty_file(self, s3_obj):
        transformed_block = s3_to_json.transform_block(
            input_json=input_json,
            dataset_identifier=sample_metadata["type"],
+            cohort="adults_v1",
            metadata=sample_metadata,
            block_size=2
        )
@@ -381,6 +390,7 @@ def test_transform_block_non_empty_file_block_size(self, s3_obj):
        transformed_block = s3_to_json.transform_block(
            input_json=input_json,
            dataset_identifier=sample_metadata["type"],
+            cohort="adults_v1",
            metadata=sample_metadata,
            block_size=2
        )
@@ -405,6 +415,7 @@ def test_transform_block_non_empty_file_all_blocks(self, s3_obj):
        transformed_block = s3_to_json.transform_block(
            input_json=input_json,
            dataset_identifier=sample_metadata["type"],
+            cohort="adults_v1",
            metadata=sample_metadata,
            block_size=10
        )
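The updated tests all pass a cohort keyword to s3_to_json.transform_json and s3_to_json.transform_block and, in the generic case, assert that the transformed record carries it. Below is a hypothetical sketch of a transform function with that call shape; only the keyword parameters and the cohort/export_end_date fields come from the diff, while the body and the datetime-based metadata are assumptions for illustration.

# Hypothetical sketch, not the repository's implementation: only the keyword
# parameters and the "cohort"/"export_end_date" fields are taken from the tests.
import datetime


def transform_json(json_obj: dict, dataset_identifier: str, cohort: str, metadata: dict) -> dict:
    # dataset_identifier would drive type-specific handling in the real job (not modeled here).
    transformed = dict(json_obj)
    # Assumed: metadata carries datetime objects, as the isoformat() assertion suggests.
    transformed["export_end_date"] = metadata["end_date"].isoformat()
    transformed["cohort"] = cohort
    return transformed


if __name__ == "__main__":
    sample_metadata = {
        "type": "HealthKitV2Samples",
        "end_date": datetime.datetime(2023, 1, 14),
    }
    record = transform_json(
        json_obj={},
        dataset_identifier=sample_metadata["type"],
        cohort="adults_v1",
        metadata=sample_metadata,
    )
    assert record["cohort"] == "adults_v1"
    assert record["export_end_date"] == sample_metadata["end_date"].isoformat()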
