Hide the raw fixtures in the joint fixture
Cito committed Aug 7, 2024
1 parent b60feaa commit 8e9e71d
Showing 7 changed files with 107 additions and 61 deletions.
85 changes: 66 additions & 19 deletions tests/fixtures/joint.py
@@ -27,7 +27,7 @@
from hexkit.custom_types import Ascii, JsonObject
from hexkit.providers.akafka import KafkaEventSubscriber
from hexkit.providers.akafka.testutils import KafkaFixture
from hexkit.providers.mongodb.testutils import MongoDbFixture
from hexkit.providers.mongodb.testutils import MongoClient, MongoDbFixture

from mass.config import Config
from mass.core import models
@@ -55,23 +55,34 @@ class State:
class JointFixture:
"""A fixture embedding all other fixtures."""

# attributes that can be used freely in tests

config: Config
query_handler: QueryHandlerPort
mongodb: MongoDbFixture
kafka: KafkaFixture
rest_client: AsyncTestClient
event_subscriber: KafkaEventSubscriber
resources: dict[str, list[models.Resource]]
rest_client: AsyncTestClient

# attributes that should not be accessed by tests directly

def empty_database(self) -> None:
_mongodb: MongoDbFixture
_kafka: KafkaFixture
_query_handler: QueryHandlerPort
_event_subscriber: KafkaEventSubscriber

# convenience methods that can be accessed by tests directly

def purge_database(self) -> None:
"""Empty the database."""
self.mongodb.empty_collections()
self._mongodb.empty_collections()
state.database_dirty = True

async def purge_events(self) -> None:
"""Purge all events."""
await self._kafka.clear_topics()

async def load_test_data(self) -> None:
"""Populate a collection for each file in test_data."""
filename_pattern = re.compile(r"/(\w+)\.json")
self.query_handler._dao_collection._indexes_created = False # type: ignore
self._query_handler._dao_collection._indexes_created = False # type: ignore
for filename in glob.glob("tests/fixtures/test_data/*.json"):
match_obj = re.search(filename_pattern, filename)
if match_obj:
@@ -81,17 +92,17 @@ async def load_test_data(self) -> None:
resources = get_resources_from_file(filename)
state.resources[collection_name] = resources
for resource in resources:
await self.query_handler.load_resource(
await self._query_handler.load_resource(
resource=resource, class_name=collection_name
)

async def reset_state(self) -> None:
"""Reset the state of the database and event topics if needed."""
if state.events_dirty:
await self.kafka.clear_topics()
await self.purge_events()
state.events_dirty = False
if state.database_dirty:
self.mongodb.empty_collections()
self.purge_database()
await self.load_test_data()
state.database_dirty = False

@@ -104,23 +115,59 @@ async def call_search_endpoint(self, params: QueryParams) -> models.QueryResults
response.raise_for_status()
return models.QueryResults(**result)

@property
def mongodb_client(self) -> MongoClient:
"""Get a MongoDB client and mark the database state as dirty."""
state.database_dirty = True
return self._mongodb.client

def recreate_mongodb_indexes(self) -> None:
"""Set flag to recreate MongoDB indexes and mark the database state as dirty."""
self._query_handler._dao_collection._indexes_created = False # type: ignore
state.database_dirty = True

async def handle_query(
self,
class_name: str,
query: str,
filters: list[models.Filter],
skip: int = 0,
limit: int | None = None,
sorting_parameters: list[models.SortingParameter] | None = None,
) -> models.QueryResults:
"""Handle a query."""
return await self._query_handler.handle_query(
class_name=class_name,
query=query,
filters=filters,
skip=skip,
limit=limit,
sorting_parameters=sorting_parameters,
)

async def delete_resource(self, resource_id: str, class_name: str) -> None:
"""Delete a resource and mark the database state as dirty."""
await self.query_handler.delete_resource(
await self._query_handler.delete_resource(
resource_id=resource_id, class_name=class_name
)
state.database_dirty = True

async def load_resource(self, resource: models.Resource, class_name: str) -> None:
"""Load a resource and mark the database state as dirty."""
await self.query_handler.load_resource(resource=resource, class_name=class_name)
await self._query_handler.load_resource(
resource=resource, class_name=class_name
)
state.database_dirty = True

async def consume_event(self) -> None:
"""Run the event subscriber in order to to consume an event."""
await self._event_subscriber.run(forever=False)

async def publish_event(
self, payload: JsonObject, type_: Ascii, topic: Ascii, key: Ascii = "test"
) -> None:
"""Publish a test event and mark the events state as dirty."""
await self.kafka.publish_event(
await self._kafka.publish_event(
payload=payload, type_=type_, topic=topic, key=key
)
state.events_dirty = True
@@ -144,10 +191,10 @@ async def joint_fixture(
):
joint_fixture = JointFixture(
config=config,
query_handler=query_handler,
event_subscriber=event_subscriber,
kafka=kafka,
mongodb=mongodb,
_query_handler=query_handler,
_event_subscriber=event_subscriber,
_kafka=kafka,
_mongodb=mongodb,
rest_client=rest_client,
resources=state.resources,
)
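Note: the hunks above toggle state.database_dirty and state.events_dirty and read state.resources, but the module-level state object itself is defined outside the lines shown. A minimal sketch of what that helper presumably looks like, assuming a plain dataclass (the field names come from the diff, the defaults are guesses):

# Minimal sketch (assumption): the real State definition in joint.py is not
# part of this diff, so the defaults below are guesses.
from dataclasses import dataclass, field

from mass.core import models


@dataclass
class State:
    """Shared bookkeeping that tells the joint fixture when it must reset."""

    database_dirty: bool = True  # True triggers the initial test-data load
    events_dirty: bool = False
    resources: dict[str, list[models.Resource]] = field(default_factory=dict)


state = State()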
2 changes: 1 addition & 1 deletion tests/test_api.py
@@ -76,7 +76,7 @@ async def test_malformed_document(
joint_fixture: JointFixture, caplog: pytest.LogCaptureFixture
):
"""Test behavior from API perspective upon querying when bad doc exists"""
joint_fixture.empty_database()
joint_fixture.purge_database()

# define and load a new resource without all the required facets
resource = models.Resource(
12 changes: 6 additions & 6 deletions tests/test_consumer.py
@@ -38,7 +38,7 @@ async def test_resource_upsert(
):
"""Try upserting with no pre-existing resource with matching ID (i.e. insert)"""
# get all the documents in the collection
results_all = await joint_fixture.query_handler.handle_query(
results_all = await joint_fixture.handle_query(
class_name=CLASS_NAME, query="", filters=[]
)
assert results_all.count > 0
@@ -69,10 +69,10 @@ async def test_resource_upsert(
)

# consume the event
await joint_fixture.event_subscriber.run(forever=False)
await joint_fixture.consume_event()

# verify that the resource was added
updated_resources = await joint_fixture.query_handler.handle_query(
updated_resources = await joint_fixture.handle_query(
class_name=CLASS_NAME, query="", filters=[]
)
if is_insert:
@@ -93,7 +93,7 @@
async def test_resource_delete(joint_fixture: JointFixture):
"""Test resource deletion via event consumption"""
# get all the documents in the collection
targeted_initial_results = await joint_fixture.query_handler.handle_query(
targeted_initial_results = await joint_fixture.handle_query(
class_name=CLASS_NAME,
query='"1HotelAlpha-id"',
filters=[],
@@ -113,10 +113,10 @@ async def test_resource_delete(joint_fixture: JointFixture):
)

# consume the event
await joint_fixture.event_subscriber.run(forever=False)
await joint_fixture.consume_event()

# get all the documents in the collection
results_post_delete = await joint_fixture.query_handler.handle_query(
results_post_delete = await joint_fixture.handle_query(
class_name=CLASS_NAME, query='"1HotelAlpha-id"', filters=[]
)

16 changes: 8 additions & 8 deletions tests/test_index_creation.py
@@ -35,17 +35,17 @@
async def test_index_creation(joint_fixture: JointFixture, create_index_manually: bool):
"""Test the index creation function."""
# indexes have been created in fixture setup, so delete them again
joint_fixture.empty_database()
joint_fixture.purge_database()

# verify collection does not exist
database = joint_fixture.mongodb.client[joint_fixture.config.db_name]
database = joint_fixture.mongodb_client[joint_fixture.config.db_name]
assert CLASS_NAME not in database.list_collection_names()

# reset the flag so it actually runs the indexing function
joint_fixture.query_handler._dao_collection._indexes_created = False # type: ignore
# let the query handler know that it needs to run the indexing function
joint_fixture.recreate_mongodb_indexes()

# make sure we do not get an error when trying to query non-existent collection
results_without_coll = await joint_fixture.query_handler.handle_query(
results_without_coll = await joint_fixture.handle_query(
class_name=CLASS_NAME,
query=QUERY_STRING,
filters=[],
@@ -54,7 +54,7 @@ async def test_index_creation(joint_fixture: JointFixture, create_index_manually
assert results_without_coll == models.QueryResults()

# create collection without index
joint_fixture.mongodb.client[joint_fixture.config.db_name].create_collection(
joint_fixture.mongodb_client[joint_fixture.config.db_name].create_collection(
CLASS_NAME
)

@@ -69,7 +69,7 @@ async def test_index_creation(joint_fixture: JointFixture, create_index_manually
)

# Verify querying empty collection with query string gives empty results model
results_without_coll = await joint_fixture.query_handler.handle_query(
results_without_coll = await joint_fixture.handle_query(
class_name=CLASS_NAME,
query=QUERY_STRING,
filters=[],
@@ -90,7 +90,7 @@ async def test_index_creation(joint_fixture: JointFixture, create_index_manually
assert any(index["name"] == f"$**_{TEXT}" for index in collection.list_indexes())

# verify that supplying a query string doesn't result in an error
results_with_coll = await joint_fixture.query_handler.handle_query(
results_with_coll = await joint_fixture.handle_query(
class_name=CLASS_NAME,
query=QUERY_STRING,
filters=[],
4 changes: 2 additions & 2 deletions tests/test_logging.py
@@ -110,7 +110,7 @@ async def test_event_sub_logging(
Constants are defined above in an effort to keep code redundancy down.
"""
# get all the documents in the collection
all_results = await joint_fixture.query_handler.handle_query(
all_results = await joint_fixture.handle_query(
class_name="NestedData",
query="",
filters=[],
@@ -135,7 +135,7 @@ async def test_event_sub_logging(
caplog.clear()

# consume the event
await joint_fixture.event_subscriber.run(forever=False)
await joint_fixture.consume_event()

# examine logs and try to be specific by filtering by logger name
logs_of_interest = [
2 changes: 1 addition & 1 deletion tests/test_relevance.py
@@ -83,7 +83,7 @@ def sorted_reference_results(
if not sorts:
sorts = [RELEVANCE_SORT, ID_ASC]

results = joint_fixture.mongodb.client[joint_fixture.config.db_name][
results = joint_fixture.mongodb_client[joint_fixture.config.db_name][
CLASS_NAME
].find({"$text": {"$search": query}}, {"score": {"$meta": "textScore"}})
results = [x for x in results] # type: ignore
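With the raw fixtures hidden, the test diffs above only go through the joint fixture's public surface. A short, hypothetical test illustrating that usage pattern (the asyncio marker, import path, class name, and assertions are assumptions, not taken from the repository):

import pytest

from tests.fixtures.joint import JointFixture  # assumed import path

CLASS_NAME = "DatasetEmbedded"  # placeholder class name


@pytest.mark.asyncio  # assumes pytest-asyncio is configured for the suite
async def test_uses_public_surface_only(joint_fixture: JointFixture):
    """Exercise the joint fixture without touching the hidden raw fixtures."""
    # query through the wrapper instead of the raw query handler
    results = await joint_fixture.handle_query(
        class_name=CLASS_NAME, query="", filters=[]
    )
    assert results.count >= 0

    # accessing the client marks the database dirty, so it is reset afterwards
    database = joint_fixture.mongodb_client[joint_fixture.config.db_name]
    assert isinstance(database.list_collection_names(), list)

    # the cleanup helpers likewise mark the corresponding state as dirty
    joint_fixture.purge_database()
    await joint_fixture.purge_events()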