diff --git a/tests/conftest.py b/tests/conftest.py
index 4bea3039..95a74de4 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -27,8 +27,6 @@ def pytest_configure(config):
     os.environ["OPENEO_BACKEND_CONFIG"] = str(Path(__file__).parent / "backend_config.py")
 
 
-
-
 _DEFAULT_PROCESSES = [
     "load_collection",
     "load_result",
diff --git a/tests/metadata/test_merging.py b/tests/metadata/test_merging.py
index 77b4deb5..20f51687 100644
--- a/tests/metadata/test_merging.py
+++ b/tests/metadata/test_merging.py
@@ -33,10 +33,10 @@ def test_merge_processes_minimal(self, merger, reporter):
                 "parameters": [],
                 "returns": {"schema": {}},
                 "federation:backends": ["b1", "b2"],
-                'deprecated': False,
-                'experimental': False,
-                'examples': [],
-                'links': [],
+                "deprecated": False,
+                "experimental": False,
+                "examples": [],
+                "links": [],
             }
         }
         assert reporter.logs == []
@@ -55,10 +55,10 @@ def test_merge_process_minimal(self, merger, reporter):
             "parameters": [],
             "returns": {"schema": {}},
             "federation:backends": ["b1", "b2"],
-            'deprecated': False,
-            'experimental': False,
-            'examples': [],
-            'links': []
+            "deprecated": False,
+            "experimental": False,
+            "examples": [],
+            "links": [],
         }
         assert reporter.logs == []
@@ -72,19 +72,12 @@ def test_merge_process_returns(self, merger, reporter):
                             "type": "array",
                             "items": {"description": "All data types are allowed."},
                         },
-                        "description": "some description"
-                    }
+                        "description": "some description",
+                    },
                 },
                 "b2": {
                     "id": "add",
-                    "returns": {
-                        "schema": {
-                            "type": "array",
-                            "items": {
-                                "description": "Any data type is allowed."
-                            }
-                        }
-                    }
+                    "returns": {"schema": {"type": "array", "items": {"description": "Any data type is allowed."}}},
                 },
             }
         )
@@ -93,7 +86,7 @@ def test_merge_process_returns(self, merger, reporter):
             "id": "add",
             "description": "add",
             "parameters": [],
-            'returns': {
+            "returns": {
                 "description": "some description",
                 "schema": {
                     "type": "array",
@@ -101,10 +94,10 @@ def test_merge_process_returns(self, merger, reporter):
                    "items": {"description": "All data types are allowed."},
                },
            },
             "federation:backends": ["b1", "b2"],
-            'deprecated': False,
-            'experimental': False,
-            'examples': [],
-            'links': []
+            "deprecated": False,
+            "experimental": False,
+            "examples": [],
+            "links": [],
         }
         assert reporter.logs == []
@@ -122,10 +115,10 @@ def test_merge_process_returns_difference(self, merger, reporter):
             "parameters": [],
             "returns": {"schema": {"type": "number"}},
             "federation:backends": ["b1", "b2"],
-            'deprecated': False,
-            'experimental': False,
-            'examples': [],
-            'links': []
+            "deprecated": False,
+            "experimental": False,
+            "examples": [],
+            "links": [],
         }
         assert reporter.logs == [
             {
@@ -175,10 +168,10 @@ def test_merge_process_exceptions(self, merger, reporter):
                 "OverflowError": {"message": "Jeez"},
             },
             "federation:backends": ["b1", "b2"],
-            'deprecated': False,
-            'experimental': False,
-            'examples': [],
-            'links': []
+            "deprecated": False,
+            "experimental": False,
+            "examples": [],
+            "links": [],
         }
         assert reporter.logs == []
@@ -203,10 +196,10 @@ def test_merge_process_exceptions_invalid(self, merger, reporter):
             "returns": {"schema": {}},
             "exceptions": {"MathError": {"message": "Nope"}},
             "federation:backends": ["b1", "b2"],
-            'deprecated': False,
-            'experimental': False,
-            'examples': [],
-            'links': []
+            "deprecated": False,
+            "experimental": False,
+            "examples": [],
+            "links": [],
         }
         assert reporter.logs == [
             {
@@ -232,10 +225,10 @@ def test_merge_process_categories(self, merger, reporter):
             "returns": {"schema": {}},
             "categories": ["Math", "Maths"],
             "federation:backends": ["b1", "b2"],
-            'deprecated': False,
-            'experimental': False,
-            'examples': [],
-            'links': []
+            "deprecated": False,
+            "experimental": False,
+            "examples": [],
+            "links": [],
         }
         assert reporter.logs == []
@@ -254,10 +247,10 @@ def test_merge_process_categories_invalid(self, merger, reporter):
             "returns": {"schema": {}},
             "categories": ["Math"],
             "federation:backends": ["b1", "b2"],
-            'deprecated': False,
-            'experimental': False,
-            'examples': [],
-            'links': []
+            "deprecated": False,
+            "experimental": False,
+            "examples": [],
+            "links": [],
         }
         assert reporter.logs == [
             {
@@ -308,10 +301,10 @@ def test_merge_process_parameters_basic(self, merger, reporter):
             ],
             "returns": {"schema": {}},
             "federation:backends": ["b1", "b2"],
-            'deprecated': False,
-            'experimental': False,
-            'examples': [],
-            'links': []
+            "deprecated": False,
+            "experimental": False,
+            "examples": [],
+            "links": [],
         }
         assert reporter.logs == []
@@ -336,9 +329,7 @@ def test_merge_process_parameters_invalid(self, merger, reporter):
             "description": "cos",
             "federation:backends": ["b1", "b2"],
             "id": "cos",
-            "parameters": [
-                {"name": "x", "schema": {"type": "number"}, "description": "x value"}
-            ],
+            "parameters": [{"name": "x", "schema": {"type": "number"}, "description": "x value"}],
             "returns": {"schema": {}},
             "deprecated": False,
             "experimental": False,
@@ -382,9 +373,7 @@ def test_merge_process_parameters_missing_required(self, merger, reporter):
             "description": "cos",
             "federation:backends": ["b1", "b2"],
             "id": "cos",
-            "parameters": [
-                {"name": "x", "schema": {"type": "number"}, "description": "x value"}
-            ],
+            "parameters": [{"name": "x", "schema": {"type": "number"}, "description": "x value"}],
             "returns": {"schema": {}},
             "deprecated": False,
             "experimental": False,
@@ -441,14 +430,12 @@ def test_merge_process_parameters_invalid_listing(self, merger, reporter):
             "description": "cos",
             "federation:backends": ["b1", "b2"],
             "id": "cos",
-            "parameters": [
-                {"name": "x", "schema": {"type": "number"}, "description": "x value"}
-            ],
+            "parameters": [{"name": "x", "schema": {"type": "number"}, "description": "x value"}],
             "returns": {"schema": {}},
-            'deprecated': False,
-            'experimental': False,
-            'examples': [],
-            'links': [],
+            "deprecated": False,
+            "experimental": False,
+            "examples": [],
+            "links": [],
         }
         assert reporter.logs == [
             {
@@ -509,10 +496,10 @@ def test_merge_process_parameters_differences(self, merger, reporter):
             ],
             "returns": {"schema": {}},
             "federation:backends": ["b1", "b2", "b3"],
-            'deprecated': False,
-            'experimental': False,
-            'examples': [],
-            'links': [],
+            "deprecated": False,
+            "experimental": False,
+            "examples": [],
+            "links": [],
         }
         assert reporter.logs == [
             {
@@ -540,7 +527,7 @@ def test_merge_process_parameters_differences(self, merger, reporter):
                     '+ "type": "array"\n',
                     " }\n",
                 ],
-            }
+            },
         ]
 
     def test_merge_process_parameters_differences_precedence(self, merger, reporter):
@@ -632,9 +619,7 @@ def test_merge_process_parameters_recursive(self, merger, reporter):
                         "schema": {
                             "type": "object",
                             "subtype": "process-graph",
-                            "parameters": [
-                                {"name": "x", "schema": {}, "description": "the x"}
-                            ],
+                            "parameters": [{"name": "x", "schema": {}, "description": "the x"}],
                             "returns": {"schema": {}},
                         },
                     },
@@ -679,9 +664,7 @@ def test_merge_process_parameters_recursive(self, merger, reporter):
                     "schema": {
                         "type": "object",
                         "subtype": "process-graph",
-                        "parameters": [
-                            {"name": "x", "description": "the x", "schema": {}}
-                        ],
+                        "parameters": [{"name": "x", "description": "the x", "schema": {}}],
                         "returns": {"schema": {}},
                     },
                 }
@@ -714,9 +697,7 @@ def test_merge_process_parameters_recursive2(self, merger, reporter):
                     "description": "what to count",
                     "schema": [
                         {
-                            "parameters": [
-                                {"name": "x", "schema": {}, "description": "X."}
-                            ],
+                            "parameters": [{"name": "x", "schema": {}, "description": "X."}],
                             "returns": {"schema": {"type": "boolean"}},
                             "subtype": "process-graph",
                             "type": "object",
@@ -789,9 +770,7 @@ def test_merge_process_parameters_recursive2(self, merger, reporter):
                         {
                             "type": "object",
                             "subtype": "process-graph",
-                            "parameters": [
-                                {"name": "x", "description": "X.", "schema": {}}
-                            ],
+                            "parameters": [{"name": "x", "description": "X.", "schema": {}}],
                             "returns": {"schema": {"type": "boolean"}},
                         },
                         {"const": True, "type": "boolean"},
@@ -800,10 +779,10 @@ def test_merge_process_parameters_recursive2(self, merger, reporter):
             ],
             "returns": {"schema": {"type": "number"}},
             "summary": "Count the number of elements",
-            'deprecated': False,
-            'experimental': False,
-            'examples': [],
-            'links': [],
+            "deprecated": False,
+            "experimental": False,
+            "examples": [],
+            "links": [],
         }
         assert reporter.logs == []
diff --git a/tests/partitionedjobs/conftest.py b/tests/partitionedjobs/conftest.py
index b7992ec1..e5ded41b 100644
--- a/tests/partitionedjobs/conftest.py
+++ b/tests/partitionedjobs/conftest.py
@@ -14,15 +14,9 @@
 TEST_USER = "tstsr"
 TEST_USER_BEARER_TOKEN = "basic//" + HttpAuthHandler.build_basic_access_token(user_id=TEST_USER)
 
-PG12 = {
-    "add": {"process_id": "add", "arguments": {"X": 1, "y": 2}, "result": True}
-}
-PG23 = {
-    "add": {"process_id": "add", "arguments": {"X": 2, "y": 3}, "result": True}
-}
-PG35 = {
-    "add": {"process_id": "add", "arguments": {"X": 3, "y": 5}, "result": True}
-}
+PG12 = {"add": {"process_id": "add", "arguments": {"X": 1, "y": 2}, "result": True}}
+PG23 = {"add": {"process_id": "add", "arguments": {"X": 2, "y": 3}, "result": True}}
+PG35 = {"add": {"process_id": "add", "arguments": {"X": 3, "y": 5}, "result": True}}
 P12 = {"process_graph": PG12}
 P23 = {"process_graph": PG23}
 P35 = {"process_graph": PG35}
@@ -110,22 +104,21 @@ def get_job_status(self, user_id: str, job_id: str):
         return self.get_job_data(user_id, job_id).history[-1]
 
     def setup_assets(self, job_id: str, assets: List[str] = None):
-        """Mock `GET /jobs/{}/results` response with fake assets """
+        """Mock `GET /jobs/{}/results` response with fake assets"""
         if assets is None:
             assets = ["preview.png", "result.tif"]
-        results = {"assets": {
-            a: {"href": self.backend_url + f"/jobs/{job_id}/results/{a}"}
-            for a in assets
-        }}
+        results = {"assets": {a: {"href": self.backend_url + f"/jobs/{job_id}/results/{a}"} for a in assets}}
         self.requests_mock.get(self.backend_url + f"/jobs/{job_id}/results", json=results)
 
     def _handle_get_jobs(self, request: requests.Request, context):
         user_id = self.get_user_id(request)
-        return {"jobs": [
-            {"id": job_id, "created": timestamp_to_rfc3339(job_data.created), "status": job_data.history[-1]}
-            for (u, job_id), job_data in self.jobs.items()
-            if u == user_id
-        ]}
+        return {
+            "jobs": [
+                {"id": job_id, "created": timestamp_to_rfc3339(job_data.created), "status": job_data.history[-1]}
+                for (u, job_id), job_data in self.jobs.items()
+                if u == user_id
+            ]
+        }
 
     def _handle_post_jobs(self, request: requests.Request, context):
         """`POST /jobs` handler (create job)"""
diff --git a/tests/partitionedjobs/test_api.py b/tests/partitionedjobs/test_api.py
index 0a6e66ed..db79eb1d 100644
--- a/tests/partitionedjobs/test_api.py
+++ b/tests/partitionedjobs/test_api.py
@@ -77,13 +77,16 @@ def _partitioned_job_tracking(self, zk_client):
     def test_create_job_basic(self, api100, zk_db, dummy1):
         api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN)
 
-        res = api100.post("/jobs", json={
-            "title": "3+5",
-            "description": "Addition of 3 and 5",
-            "process": P35,
-            "plan": "free",
-            "job_options": {"split_strategy": "flimsy"}
-        }).assert_status_code(201)
+        res = api100.post(
+            "/jobs",
+            json={
+                "title": "3+5",
+                "description": "Addition of 3 and 5",
+                "process": P35,
+                "plan": "free",
+                "job_options": {"split_strategy": "flimsy"},
+            },
+        ).assert_status_code(201)
 
         pjob_id = "pj-20220119-123456"
         expected_job_id = f"agg-{pjob_id}"
@@ -115,16 +118,18 @@ def test_create_job_basic(self, api100, zk_db, dummy1):
             "progress": 0,
         }
 
-        assert zk_db.list_subjobs(user_id=TEST_USER, pjob_id=pjob_id) == {"0000": {
-            "backend_id": "b1",
-            "process_graph": PG35,
-            "title": "Partitioned job pj-20220119-123456 part 0000 (1/1)"
-        }}
+        assert zk_db.list_subjobs(user_id=TEST_USER, pjob_id=pjob_id) == {
+            "0000": {
+                "backend_id": "b1",
+                "process_graph": PG35,
+                "title": "Partitioned job pj-20220119-123456 part 0000 (1/1)",
+            }
+        }
         assert zk_db.get_backend_job_id(user_id=TEST_USER, pjob_id=pjob_id, sjob_id="0000") == "1-jb-0"
         assert zk_db.get_sjob_status(user_id=TEST_USER, pjob_id=pjob_id, sjob_id="0000") == {
             "status": "created",
             "message": approx_str_prefix("Created in 0:00"),
-            "timestamp": pytest.approx(self.now.epoch, abs=5)
+            "timestamp": pytest.approx(self.now.epoch, abs=5),
         }
 
     @now.mock
@@ -133,10 +138,7 @@ def test_create_job_preprocessing(self, api100, zk_db, dummy1):
         api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN)
 
         pg = {"load": {"process_id": "load_result", "arguments": {"id": "b1-b6tch-j08"}, "result": True}}
-        res = api100.post("/jobs", json={
-            "process": {"process_graph": pg},
-            "job_options": {"split_strategy": "flimsy"}
-        })
+        res = api100.post("/jobs", json={"process": {"process_graph": pg}, "job_options": {"split_strategy": "flimsy"}})
         res.assert_status_code(201)
 
         expected_job_id = "agg-pj-20220119-123456"
@@ -151,10 +153,9 @@ def test_create_and_list_job(self, api100, zk_db, dummy1):
         api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN)
 
-        res = api100.post("/jobs", json={
-            "process": P35,
-            "job_options": {"split_strategy": "flimsy"}
-        }).assert_status_code(201)
+        res = api100.post(
+            "/jobs", json={"process": P35, "job_options": {"split_strategy": "flimsy"}}
+        ).assert_status_code(201)
 
         pjob_id = "pj-20220119-123456"
         expected_job_id = f"agg-{pjob_id}"
@@ -164,7 +165,7 @@ def test_create_and_list_job(self, api100, zk_db, dummy1):
         assert res.json == {
             "jobs": [
                 {"id": "b1-1-jb-0", "created": self.now.rfc3339, "status": "created"},
-                {"id": expected_job_id, "created": self.now.rfc3339, "status": "created", "progress": 0}
+                {"id": expected_job_id, "created": self.now.rfc3339, "status": "created", "progress": 0},
             ],
             "federation:missing": ["b2"],
             "links": [],
@@ -173,13 +174,16 @@ def test_describe_wrong_user(self, api100, dummy1):
         api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN)
 
-        res = api100.post("/jobs", json={
-            "title": "3+5",
-            "description": "Addition of 3 and 5",
-            "process": P35,
-            "plan": "free",
-            "job_options": {"split_strategy": "flimsy"}
-        }).assert_status_code(201)
+        res = api100.post(
+            "/jobs",
+            json={
+                "title": "3+5",
+                "description": "Addition of 3 and 5",
+                "process": P35,
+                "plan": "free",
+                "job_options": {"split_strategy": "flimsy"},
+            },
+        ).assert_status_code(201)
 
         job_id = res.headers["OpenEO-Identifier"]
         res = api100.get(f"/jobs/{job_id}").assert_status_code(200)
@@ -194,13 +198,16 @@ def test_create_job_failed_backend(self, api100, zk_db, requests_mock, dummy1):
         requests_mock.post(dummy1.backend_url + "/jobs", status_code=500, json={"code": "Internal", "message": "nope"})
 
         api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN)
-        res = api100.post("/jobs", json={
-            "title": "3+5",
-            "description": "Addition of 3 and 5",
-            "process": P35,
-            "plan": "free",
-            "job_options": {"split_strategy": "flimsy"}
-        }).assert_status_code(201)
+        res = api100.post(
+            "/jobs",
+            json={
+                "title": "3+5",
+                "description": "Addition of 3 and 5",
+                "process": P35,
+                "plan": "free",
+                "job_options": {"split_strategy": "flimsy"},
+            },
+        ).assert_status_code(201)
 
         pjob_id = "pj-20220119-123456"
         expected_job_id = f"agg-{pjob_id}"
@@ -217,13 +224,17 @@ def test_create_job_failed_backend(self, api100, zk_db, requests_mock, dummy1):
             "progress": 0,
         }
 
-        assert zk_db.get_pjob_status(user_id=TEST_USER, pjob_id=pjob_id) == DictSubSet({
-            "status": "error",
-        })
-        assert zk_db.get_sjob_status(user_id=TEST_USER, pjob_id=pjob_id, sjob_id="0000") == DictSubSet({
-            "status": "error",
-            "message": "Create failed: [500] Internal: nope",
-        })
+        assert zk_db.get_pjob_status(user_id=TEST_USER, pjob_id=pjob_id) == DictSubSet(
+            {
+                "status": "error",
+            }
+        )
+        assert zk_db.get_sjob_status(user_id=TEST_USER, pjob_id=pjob_id, sjob_id="0000") == DictSubSet(
+            {
+                "status": "error",
+                "message": "Create failed: [500] Internal: nope",
+            }
+        )
 
         res = api100.get(f"/jobs/{expected_job_id}/logs").assert_status_code(200)
         assert res.json == {
@@ -236,10 +247,9 @@ def test_start_job(self, api100, zk_db, dummy1):
         api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN)
 
         # Submit job
-        res = api100.post("/jobs", json={
-            "process": P35,
-            "job_options": {"split_strategy": "flimsy"}
-        }).assert_status_code(201)
+        res = api100.post(
+            "/jobs", json={"process": P35, "job_options": {"split_strategy": "flimsy"}}
+        ).assert_status_code(201)
         pjob_id = "pj-20220119-123456"
         expected_job_id = f"agg-{pjob_id}"
@@ -254,26 +264,30 @@ def test_start_job(self, api100, zk_db, dummy1):
         res = api100.get(f"/jobs/{expected_job_id}").assert_status_code(200)
         assert res.json == DictSubSet({"id": expected_job_id, "status": "running", "progress": 0})
 
-        assert zk_db.get_pjob_metadata(user_id=TEST_USER, pjob_id=pjob_id) == DictSubSet({
-            "created": self.now.epoch,
-            "process": P35,
-        })
-        assert zk_db.get_pjob_status(user_id=TEST_USER, pjob_id=pjob_id) == DictSubSet({
-            "status": "running",
-            "message": approx_str_contains("{'running': 1}"),
-        })
+        assert zk_db.get_pjob_metadata(user_id=TEST_USER, pjob_id=pjob_id) == DictSubSet(
+            {
+                "created": self.now.epoch,
+                "process": P35,
+            }
+        )
+        assert zk_db.get_pjob_status(user_id=TEST_USER, pjob_id=pjob_id) == DictSubSet(
+            {
+                "status": "running",
+                "message": approx_str_contains("{'running': 1}"),
+            }
+        )
         assert zk_db.get_backend_job_id(user_id=TEST_USER, pjob_id=pjob_id, sjob_id="0000") == "1-jb-0"
         assert zk_db.get_sjob_status(user_id=TEST_USER, pjob_id=pjob_id, sjob_id="0000") == DictSubSet(
-            {"status": "running"})
+            {"status": "running"}
+        )
 
     def test_start_job_wrong_user(self, api100, dummy1):
         api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN)
 
         # Submit job
-        res = api100.post("/jobs", json={
-            "process": P35,
-            "job_options": {"split_strategy": "flimsy"}
-        }).assert_status_code(201)
+        res = api100.post(
+            "/jobs", json={"process": P35, "job_options": {"split_strategy": "flimsy"}}
"job_options": {"split_strategy": "flimsy"}} + ).assert_status_code(201) job_id = res.headers["OpenEO-Identifier"] res = api100.get(f"/jobs/{job_id}").assert_status_code(200) @@ -288,10 +302,9 @@ def test_sync_job(self, api100, zk_db, dummy1): api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN) # Submit job - res = api100.post("/jobs", json={ - "process": P35, - "job_options": {"split_strategy": "flimsy"} - }).assert_status_code(201) + res = api100.post( + "/jobs", json={"process": P35, "job_options": {"split_strategy": "flimsy"}} + ).assert_status_code(201) pjob_id = "pj-20220119-123456" expected_job_id = f"agg-{pjob_id}" @@ -306,35 +319,39 @@ def test_sync_job(self, api100, zk_db, dummy1): assert res.json == DictSubSet({"id": expected_job_id, "status": "running", "progress": 0}) # Status check: still running - dummy1.set_job_status(TEST_USER, '1-jb-0', "running") + dummy1.set_job_status(TEST_USER, "1-jb-0", "running") res = api100.get(f"/jobs/{expected_job_id}").assert_status_code(200) assert res.json == DictSubSet({"id": expected_job_id, "status": "running", "progress": 0}) # Status check: finished - dummy1.set_job_status(TEST_USER, '1-jb-0', "finished") + dummy1.set_job_status(TEST_USER, "1-jb-0", "finished") res = api100.get(f"/jobs/{expected_job_id}").assert_status_code(200) assert res.json == DictSubSet({"id": expected_job_id, "status": "finished", "progress": 100}) - assert zk_db.get_pjob_metadata(user_id=TEST_USER, pjob_id=pjob_id) == DictSubSet({ - "created": self.now.epoch, - "process": P35, - }) - assert zk_db.get_pjob_status(user_id=TEST_USER, pjob_id=pjob_id) == DictSubSet({ - "status": "finished", - "message": approx_str_contains("{'finished': 1}"), - }) + assert zk_db.get_pjob_metadata(user_id=TEST_USER, pjob_id=pjob_id) == DictSubSet( + { + "created": self.now.epoch, + "process": P35, + } + ) + assert zk_db.get_pjob_status(user_id=TEST_USER, pjob_id=pjob_id) == DictSubSet( + { + "status": "finished", + "message": approx_str_contains("{'finished': 1}"), + } + ) assert zk_db.get_backend_job_id(user_id=TEST_USER, pjob_id=pjob_id, sjob_id="0000") == "1-jb-0" assert zk_db.get_sjob_status(user_id=TEST_USER, pjob_id=pjob_id, sjob_id="0000") == DictSubSet( - {"status": "finished"}) + {"status": "finished"} + ) def test_sync_job_wrong_user(self, api100, dummy1): api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN) # Submit job - res = api100.post("/jobs", json={ - "process": P35, - "job_options": {"split_strategy": "flimsy"} - }).assert_status_code(201) + res = api100.post( + "/jobs", json={"process": P35, "job_options": {"split_strategy": "flimsy"}} + ).assert_status_code(201) job_id = res.headers["OpenEO-Identifier"] # Start job @@ -355,10 +372,9 @@ def test_job_results(self, api100, dummy1): api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN) # Submit job - res = api100.post("/jobs", json={ - "process": P35, - "job_options": {"split_strategy": "flimsy"} - }).assert_status_code(201) + res = api100.post( + "/jobs", json={"process": P35, "job_options": {"split_strategy": "flimsy"}} + ).assert_status_code(201) expected_job_id = "agg-pj-20220119-123456" assert res.headers["OpenEO-Identifier"] == expected_job_id @@ -377,23 +393,24 @@ def test_job_results(self, api100, dummy1): dummy1.setup_assets(job_id="1-jb-0", assets=["preview.png", "res001.tif", "res002.tif"]) res = api100.get(f"/jobs/{expected_job_id}/results").assert_status_code(200) - assert res.json == DictSubSet({ - "id": expected_job_id, - "assets": { - "0000-preview.png": DictSubSet({"href": 
-                "0000-res001.tif": DictSubSet({"href": dummy1.backend_url + "/jobs/1-jb-0/results/res001.tif"}),
-                "0000-res002.tif": DictSubSet({"href": dummy1.backend_url + "/jobs/1-jb-0/results/res002.tif"}),
+        assert res.json == DictSubSet(
+            {
+                "id": expected_job_id,
+                "assets": {
+                    "0000-preview.png": DictSubSet({"href": dummy1.backend_url + "/jobs/1-jb-0/results/preview.png"}),
+                    "0000-res001.tif": DictSubSet({"href": dummy1.backend_url + "/jobs/1-jb-0/results/res001.tif"}),
+                    "0000-res002.tif": DictSubSet({"href": dummy1.backend_url + "/jobs/1-jb-0/results/res002.tif"}),
+                },
             }
-        })
+        )
 
     def test_job_results_wrong_user(self, api100, dummy1):
         api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN)
 
         # Submit job
-        res = api100.post("/jobs", json={
-            "process": P35,
-            "job_options": {"split_strategy": "flimsy"}
-        }).assert_status_code(201)
+        res = api100.post(
+            "/jobs", json={"process": P35, "job_options": {"split_strategy": "flimsy"}}
+        ).assert_status_code(201)
         job_id = res.headers["OpenEO-Identifier"]
 
         # Start job
@@ -410,18 +427,22 @@ def test_get_logs(self, api100, requests_mock, dummy1):
-        requests_mock.get(dummy1.backend_url + "/jobs/1-jb-0/logs", json={
-            "logs": [{"id": "123", "level": "info", "message": "Created job. You're welcome."}]
-        })
+        requests_mock.get(
+            dummy1.backend_url + "/jobs/1-jb-0/logs",
+            json={"logs": [{"id": "123", "level": "info", "message": "Created job. You're welcome."}]},
+        )
 
         api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN)
-        res = api100.post("/jobs", json={
-            "title": "3+5",
-            "description": "Addition of 3 and 5",
-            "process": P35,
-            "plan": "free",
-            "job_options": {"split_strategy": "flimsy"}
-        }).assert_status_code(201)
+        res = api100.post(
+            "/jobs",
+            json={
+                "title": "3+5",
+                "description": "Addition of 3 and 5",
+                "process": P35,
+                "plan": "free",
+                "job_options": {"split_strategy": "flimsy"},
+            },
+        ).assert_status_code(201)
         expected_job_id = "agg-pj-20220119-123456"
         assert res.headers["OpenEO-Identifier"] == expected_job_id
@@ -434,10 +455,9 @@ def test_get_logs_wrong_user(self, api100, requests_mock, dummy1):
         api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN)
-        res = api100.post("/jobs", json={
-            "process": P35,
-            "job_options": {"split_strategy": "flimsy"}
-        }).assert_status_code(201)
+        res = api100.post(
+            "/jobs", json={"process": P35, "job_options": {"split_strategy": "flimsy"}}
+        ).assert_status_code(201)
         expected_job_id = "agg-pj-20220119-123456"
         assert res.headers["OpenEO-Identifier"] == expected_job_id
@@ -455,16 +475,15 @@ class TestTileGridBatchJobSplitting:
                 "id": "S2",
                 # covers 9 (3x3) utm-10km tiles
                 "spatial_extent": {"west": 4.9, "south": 51.1, "east": 5.2, "north": 51.3},
-            }
+            },
         },
         "sr": {
             "process_id": "save_result",
             "arguments": {"data": {"from_node": "lc"}, "format": "GTiff"},
             "result": True,
-        }
+        },
     }
 
-
     @pytest.fixture(autouse=True)
     def _partitioned_job_tracking(self, zk_client):
         with config_overrides(partitioned_job_tracking={"zk_client": zk_client}):
@@ -474,12 +493,15 @@ def test_create_job_basic(self, flask_app, api100, zk_db, dummy1):
         api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN)
 
-        res = api100.post("/jobs", json={
-            "title": "Mol",
-            "process": {"process_graph": self.PG_MOL},
-            "plan": "free",
-            "job_options": {"tile_grid": "utm-10km"}
-        }).assert_status_code(201)
+        res = api100.post(
+            "/jobs",
+            json={
+                "title": "Mol",
+                "process": {"process_graph": self.PG_MOL},
+                "plan": "free",
+                "job_options": {"tile_grid": "utm-10km"},
+            },
+        ).assert_status_code(201)
 
         pjob_id = "pj-20220119-123456"
         expected_job_id = f"agg-{pjob_id}"
@@ -496,18 +518,19 @@ def test_create_job_basic(self, flask_app, api100, zk_db, dummy1):
             "progress": 0,
         }
 
-        assert zk_db.get_pjob_metadata(user_id=TEST_USER, pjob_id=pjob_id) == DictSubSet({
-            "user_id": TEST_USER,
-            "created": self.now.epoch,
-            "process": {"process_graph": self.PG_MOL},
-            "metadata": {
-                "title": "Mol", "plan": "free",
-                "_tiling_geometry": DictSubSet({
-                    "global_spatial_extent": DictSubSet({"west": 4.9})
-                }),
-            },
-            "job_options": {"tile_grid": "utm-10km"},
-        })
+        assert zk_db.get_pjob_metadata(user_id=TEST_USER, pjob_id=pjob_id) == DictSubSet(
+            {
+                "user_id": TEST_USER,
+                "created": self.now.epoch,
+                "process": {"process_graph": self.PG_MOL},
+                "metadata": {
+                    "title": "Mol",
+                    "plan": "free",
+                    "_tiling_geometry": DictSubSet({"global_spatial_extent": DictSubSet({"west": 4.9})}),
+                },
+                "job_options": {"tile_grid": "utm-10km"},
+            }
+        )
         assert zk_db.get_pjob_status(user_id=TEST_USER, pjob_id=pjob_id) == {
             "status": "created",
             "message": approx_str_contains("{'created': 9}"),
@@ -520,7 +543,8 @@ def test_create_job_basic(self, flask_app, api100, zk_db, dummy1):
         tiles = []
         for sjob_id, subjob_metadata in subjobs.items():
             assert zk_db.get_sjob_status(user_id=TEST_USER, pjob_id=pjob_id, sjob_id=sjob_id) == DictSubSet(
-                {"status": "created"})
+                {"status": "created"}
+            )
             job_id = zk_db.get_backend_job_id(user_id=TEST_USER, pjob_id=pjob_id, sjob_id=sjob_id)
             dummy_jobs.append(job_id)
             assert dummy1.get_job_status(TEST_USER, job_id) == "created"
@@ -541,20 +565,22 @@ def test_create_job_preprocessing(self, flask_app, api100, zk_db, dummy1):
         # Process graph with load_result
         pg = {
             "lr": {"process_id": "load_result", "arguments": {"id": "b1-b6tch-j08"}},
-            "fb": {"process_id": "filter_bbox", "arguments": {
-                "data": {"from_node": "lr"},
-                "extent": {"west": 4.9, "south": 51.1, "east": 4.91, "north": 51.11},
-            }},
+            "fb": {
+                "process_id": "filter_bbox",
+                "arguments": {
+                    "data": {"from_node": "lr"},
+                    "extent": {"west": 4.9, "south": 51.1, "east": 4.91, "north": 51.11},
+                },
+            },
             "sr": {
                 "process_id": "save_result",
                 "arguments": {"data": {"from_node": "fb"}, "format": "GTiff"},
                 "result": True,
-            }
+            },
         }
-        res = api100.post("/jobs", json={
-            "process": {"process_graph": pg},
-            "job_options": {"tile_grid": "utm-10km"}
-        }).assert_status_code(201)
+        res = api100.post(
+            "/jobs", json={"process": {"process_graph": pg}, "job_options": {"tile_grid": "utm-10km"}}
+        ).assert_status_code(201)
 
         pjob_id = "pj-20220119-123456"
         expected_job_id = f"agg-{pjob_id}"
@@ -568,12 +594,15 @@ def test_job_results_basic(self, flask_app, api100, dummy1):
         api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN)
 
-        res = api100.post("/jobs", json={
-            "title": "Mol",
-            "process": {"process_graph": self.PG_MOL},
-            "plan": "free",
-            "job_options": {"tile_grid": "utm-10km"}
-        }).assert_status_code(201)
+        res = api100.post(
+            "/jobs",
+            json={
+                "title": "Mol",
+                "process": {"process_graph": self.PG_MOL},
+                "plan": "free",
+                "job_options": {"tile_grid": "utm-10km"},
+            },
+        ).assert_status_code(201)
 
         pjob_id = "pj-20220119-123456"
         expected_job_id = f"agg-{pjob_id}"
@@ -609,28 +638,34 @@
         # Get results
         res = api100.get(f"/jobs/{expected_job_id}/results").assert_status_code(200)
-        assert res.json == DictSubSet({
-            "id": expected_job_id,
-            "assets": {
-                "0000-result.tif": DictSubSet({"href": dummy1.backend_url + "/jobs/1-jb-0/results/result.tif"}),
-                "0001-result.tif": DictSubSet({"href": dummy1.backend_url + "/jobs/1-jb-1/results/result.tif"}),
-                "0002-result.tif": DictSubSet({"href": dummy1.backend_url + "/jobs/1-jb-2/results/result.tif"}),
-                "0003-result.tif": DictSubSet({"href": dummy1.backend_url + "/jobs/1-jb-3/results/result.tif"}),
-                "0004-result.tif": DictSubSet({"href": dummy1.backend_url + "/jobs/1-jb-4/results/result.tif"}),
-                "0005-result.tif": DictSubSet({"href": dummy1.backend_url + "/jobs/1-jb-5/results/result.tif"}),
-                "0006-result.tif": DictSubSet({"href": dummy1.backend_url + "/jobs/1-jb-6/results/result.tif"}),
-                "0007-result.tif": DictSubSet({"href": dummy1.backend_url + "/jobs/1-jb-7/results/result.tif"}),
-                "0008-result.tif": DictSubSet({"href": dummy1.backend_url + "/jobs/1-jb-8/results/result.tif"}),
-                "tile_grid.geojson": DictSubSet({
-                    "href": "http://oeoa.test/openeo/1.0.0/jobs/agg-pj-20220119-123456/results/assets/tile_grid.geojson",
-                    "type": "application/geo+json",
-                })
-            },
-            "geometry": DictSubSet({
-                "type": "GeometryCollection",
-                "geometries": [DictSubSet({"type": "Polygon"}), DictSubSet({"type": "MultiPolygon"})]
-            })
-        })
+        assert res.json == DictSubSet(
+            {
+                "id": expected_job_id,
+                "assets": {
+                    "0000-result.tif": DictSubSet({"href": dummy1.backend_url + "/jobs/1-jb-0/results/result.tif"}),
+                    "0001-result.tif": DictSubSet({"href": dummy1.backend_url + "/jobs/1-jb-1/results/result.tif"}),
+                    "0002-result.tif": DictSubSet({"href": dummy1.backend_url + "/jobs/1-jb-2/results/result.tif"}),
+                    "0003-result.tif": DictSubSet({"href": dummy1.backend_url + "/jobs/1-jb-3/results/result.tif"}),
+                    "0004-result.tif": DictSubSet({"href": dummy1.backend_url + "/jobs/1-jb-4/results/result.tif"}),
+                    "0005-result.tif": DictSubSet({"href": dummy1.backend_url + "/jobs/1-jb-5/results/result.tif"}),
+                    "0006-result.tif": DictSubSet({"href": dummy1.backend_url + "/jobs/1-jb-6/results/result.tif"}),
+                    "0007-result.tif": DictSubSet({"href": dummy1.backend_url + "/jobs/1-jb-7/results/result.tif"}),
+                    "0008-result.tif": DictSubSet({"href": dummy1.backend_url + "/jobs/1-jb-8/results/result.tif"}),
+                    "tile_grid.geojson": DictSubSet(
+                        {
+                            "href": "http://oeoa.test/openeo/1.0.0/jobs/agg-pj-20220119-123456/results/assets/tile_grid.geojson",
+                            "type": "application/geo+json",
+                        }
+                    ),
+                },
+                "geometry": DictSubSet(
+                    {
+                        "type": "GeometryCollection",
+                        "geometries": [DictSubSet({"type": "Polygon"}), DictSubSet({"type": "MultiPolygon"})],
+                    }
+                ),
+            }
+        )
 
         res = api100.get("/jobs/agg-pj-20220119-123456/results/assets/tile_grid.geojson").assert_status_code(200)
         assert res.json == DictSubSet({"type": "FeatureCollection"})
diff --git a/tests/partitionedjobs/test_crossbackend.py b/tests/partitionedjobs/test_crossbackend.py
index 9c26740e..b4b62393 100644
--- a/tests/partitionedjobs/test_crossbackend.py
+++ b/tests/partitionedjobs/test_crossbackend.py
@@ -21,9 +21,7 @@ class TestCrossBackendSplitter:
     def test_split_simple(self):
-        process_graph = {
-            "add": {"process_id": "add", "arguments": {"x": 3, "y": 5}, "result": True}
-        }
+        process_graph = {"add": {"process_id": "add", "arguments": {"x": 3, "y": 5}, "result": True}}
         splitter = CrossBackendSplitter(backend_for_collection=lambda cid: "foo")
         res = splitter.split({"process_graph": process_graph})
 
@@ -54,9 +52,7 @@ def test_split_basic(self):
                 "result": True,
             },
         }
-        splitter = CrossBackendSplitter(
-            backend_for_collection=lambda cid: cid.split("_")[0]
-        )
+        splitter = CrossBackendSplitter(backend_for_collection=lambda cid: cid.split("_")[0])
         res = splitter.split({"process_graph": process_graph})
 
         assert res.subjobs == {
@@ -277,20 +273,12 @@ def setup_requests_mock(self, requests_mock: requests_mock.Mocker):
         requests_mock.get(f"{self.url}/", json={"api_version": "1.1.0"})
         requests_mock.post(f"{self.url}/jobs", text=self._handle_job_create)
         requests_mock.get(re.compile("/jobs/([^/]*)$"), json=self._handle_job_status)
-        requests_mock.post(
-            re.compile("/jobs/([^/]*)/results$"), text=self._handle_job_start
-        )
+        requests_mock.post(re.compile("/jobs/([^/]*)/results$"), text=self._handle_job_start)
 
     def _handle_job_create(self, request: requests.Request, context):
         pg = request.json()["process"]["process_graph"]
         # Determine job id based on used collection id
-        cids = "-".join(
-            sorted(
-                n["arguments"]["id"]
-                for n in pg.values()
-                if n["process_id"] == "load_collection"
-            )
-        )
+        cids = "-".join(sorted(n["arguments"]["id"] for n in pg.values() if n["process_id"] == "load_collection"))
         assert cids
         job_id = f"job-{cids}".lower()
         if job_id in self.jobs:
@@ -383,9 +371,7 @@ def test_basic(self, aggregator: _FakeAggregator):
                 "result": True,
             },
         }
-        splitter = CrossBackendSplitter(
-            backend_for_collection=lambda cid: cid.split("_")[0]
-        )
+        splitter = CrossBackendSplitter(backend_for_collection=lambda cid: cid.split("_")[0])
         pjob: PartitionedJob = splitter.split({"process_graph": process_graph})
 
         connection = openeo.Connection(aggregator.url)
diff --git a/tests/partitionedjobs/test_splitting.py b/tests/partitionedjobs/test_splitting.py
index ae01f7ec..f19d4215 100644
--- a/tests/partitionedjobs/test_splitting.py
+++ b/tests/partitionedjobs/test_splitting.py
@@ -33,70 +33,81 @@ def test_flimsy_splitter(multi_backend_connection, catalog):
 class TestTileGridSplitter:
-
     @pytest.fixture
     def aggregator_processing(self, multi_backend_connection, catalog, requests_mock, backend1) -> AggregatorProcessing:
         requests_mock.get(backend1 + "/collections", json={"collections": [{"id": "S2"}]})
         requests_mock.get(backend1 + "/collections/S2", json={"id": "S2"})
         return AggregatorProcessing(backends=multi_backend_connection, catalog=catalog)
 
-    @pytest.mark.parametrize(["west", "south", "tile_grid", "expected_extent"], [
-        # >>> from pyproj import Transformer
-        # >>> Transformer.from_crs("epsg:4326", "epsg:32631", always_xy=True).transform(5, 51)
-        # (640333.2963383198, 5651728.68267166)
-        (
-            5, 51, "utm-100km",
-            {"west": 600_000, "south": 5_600_000, "east": 700_000, "north": 5_700_000, "crs": "epsg:32631"}
-        ),
-        (
-            5, 51, "utm-20km",
-            {"west": 640_000, "south": 5_640_000, "east": 660_000, "north": 5_660_000, "crs": "epsg:32631"}
-        ),
-        (
-            5, 51, "utm-10km",
-            {"west": 640_000, "south": 5_650_000, "east": 650_000, "north": 5_660_000, "crs": "epsg:32631"}
-        ),
-        (
-            5, 51, "wgs84-1degree",
-            {"west": 5, "south": 51, "east": 6, "north": 52, "crs": "epsg:4326"}
-        ),
-        # >>> Transformer.from_crs("epsg:4326", "epsg:32633", always_xy=True).transform(12.3, 45.6)
-        # (289432.90485397115, 5053152.380961399)
-        (
-            12.3, 45.6, "utm-20km",
-            {"west": 280_000, "south": 5_040_000, "east": 300_000, "north": 5_060_000, "crs": "epsg:32633"}
-        ),
-        # >>> Transformer.from_crs("epsg:4326", "epsg:32724", always_xy=True).transform(-42, -5)
-        # (167286.20126155682, 9446576.013116669)
-        (
-            -42, -5, "utm-10km",
-            {"west": 160_000, "south": 9_440_000, "east": 170_000, "north": 9_450_000, "crs": "epsg:32724"}
"east": 170_000, "north": 9_450_000, "crs": "epsg:32724"} - ), - ]) + @pytest.mark.parametrize( + ["west", "south", "tile_grid", "expected_extent"], + [ + # >>> from pyproj import Transformer + # >>> Transformer.from_crs("epsg:4326", "epsg:32631", always_xy=True).transform(5, 51) + # (640333.2963383198, 5651728.68267166) + ( + 5, + 51, + "utm-100km", + {"west": 600_000, "south": 5_600_000, "east": 700_000, "north": 5_700_000, "crs": "epsg:32631"}, + ), + ( + 5, + 51, + "utm-20km", + {"west": 640_000, "south": 5_640_000, "east": 660_000, "north": 5_660_000, "crs": "epsg:32631"}, + ), + ( + 5, + 51, + "utm-10km", + {"west": 640_000, "south": 5_650_000, "east": 650_000, "north": 5_660_000, "crs": "epsg:32631"}, + ), + (5, 51, "wgs84-1degree", {"west": 5, "south": 51, "east": 6, "north": 52, "crs": "epsg:4326"}), + # >>> Transformer.from_crs("epsg:4326", "epsg:32633", always_xy=True).transform(12.3, 45.6) + # (289432.90485397115, 5053152.380961399) + ( + 12.3, + 45.6, + "utm-20km", + {"west": 280_000, "south": 5_040_000, "east": 300_000, "north": 5_060_000, "crs": "epsg:32633"}, + ), + # >>> Transformer.from_crs("epsg:4326", "epsg:32724", always_xy=True).transform(-42, -5) + # (167286.20126155682, 9446576.013116669) + ( + -42, + -5, + "utm-10km", + {"west": 160_000, "south": 9_440_000, "east": 170_000, "north": 9_450_000, "crs": "epsg:32724"}, + ), + ], + ) def test_simple_small_coverage(self, aggregator_processing, tile_grid, west, south, expected_extent): """load_collection with very small spatial extent that should only cover one tile""" splitter = TileGridSplitter(processing=aggregator_processing) - process = {"process_graph": { - "lc": { - "process_id": "load_collection", - "arguments": {"id": "S2", "spatial_extent": { - "west": west, "south": south, - "east": west + 0.001, "north": south + 0.001 - }}, - }, - "sr": { - "process_id": "save_result", - "arguments": {"data": {"from_node": "lc"}, "format": "GTiff"}, - "result": True - }, - }} + process = { + "process_graph": { + "lc": { + "process_id": "load_collection", + "arguments": { + "id": "S2", + "spatial_extent": {"west": west, "south": south, "east": west + 0.001, "north": south + 0.001}, + }, + }, + "sr": { + "process_id": "save_result", + "arguments": {"data": {"from_node": "lc"}, "format": "GTiff"}, + "result": True, + }, + } + } pjob = splitter.split(process=process, job_options={"tile_grid": tile_grid}) assert len(pjob.subjobs) == 1 ((sjob_id, sjob),) = pjob.subjobs.items() new_process_graph = sjob.process_graph assert len(new_process_graph) == 3 - new_node_id, = set(new_process_graph.keys()).difference(process["process_graph"].keys()) + (new_node_id,) = set(new_process_graph.keys()).difference(process["process_graph"].keys()) assert new_process_graph["lc"] == process["process_graph"]["lc"] assert new_process_graph[new_node_id] == { "process_id": "filter_bbox", @@ -105,29 +116,34 @@ def test_simple_small_coverage(self, aggregator_processing, tile_grid, west, sou assert new_process_graph["sr"] == { "process_id": "save_result", "arguments": {"data": {"from_node": new_node_id}, "format": "GTiff"}, - "result": True + "result": True, } - @pytest.mark.parametrize(["spatial_extent", "tile_grid", "expected_tile_count", "expected_utm_crs"], [ - (dict(west=7.2, south=50, east=7.9, north=50.7), "utm-100km", 4, "epsg:32632"), - (dict(west=7.2, south=50, east=7.9, north=50.7), "utm-20km", 20, "epsg:32632"), - (dict(west=7.2, south=50, east=7.9, north=50.7), "utm-10km", 54, "epsg:32632"), - (dict(west=-55, south=-33, east=-50, north=-31), 
"utm-100km", 15, "epsg:32722"), - (dict(west=-82, south=31, east=-81, north=32), "utm-20km", 42, "epsg:32617"), - ]) + @pytest.mark.parametrize( + ["spatial_extent", "tile_grid", "expected_tile_count", "expected_utm_crs"], + [ + (dict(west=7.2, south=50, east=7.9, north=50.7), "utm-100km", 4, "epsg:32632"), + (dict(west=7.2, south=50, east=7.9, north=50.7), "utm-20km", 20, "epsg:32632"), + (dict(west=7.2, south=50, east=7.9, north=50.7), "utm-10km", 54, "epsg:32632"), + (dict(west=-55, south=-33, east=-50, north=-31), "utm-100km", 15, "epsg:32722"), + (dict(west=-82, south=31, east=-81, north=32), "utm-20km", 42, "epsg:32617"), + ], + ) def test_basic(self, aggregator_processing, spatial_extent, tile_grid, expected_tile_count, expected_utm_crs): splitter = TileGridSplitter(processing=aggregator_processing) - process = {"process_graph": { - "lc": { - "process_id": "load_collection", - "arguments": {"id": "S2", "spatial_extent": spatial_extent}, - }, - "sr": { - "process_id": "save_result", - "arguments": {"data": {"from_node": "lc"}, "format": "GTiff"}, - "result": True - }, - }} + process = { + "process_graph": { + "lc": { + "process_id": "load_collection", + "arguments": {"id": "S2", "spatial_extent": spatial_extent}, + }, + "sr": { + "process_id": "save_result", + "arguments": {"data": {"from_node": "lc"}, "format": "GTiff"}, + "result": True, + }, + } + } pjob = splitter.split(process=process, job_options={"tile_grid": tile_grid, "max_tiles": 64}) assert len(pjob.subjobs) == expected_tile_count @@ -137,12 +153,12 @@ def test_basic(self, aggregator_processing, spatial_extent, tile_grid, expected_ for subjob_id, subjob in pjob.subjobs.items(): new_process_graph = subjob.process_graph assert len(new_process_graph) == 3 - new_node_id, = set(new_process_graph.keys()).difference(process["process_graph"].keys()) + (new_node_id,) = set(new_process_graph.keys()).difference(process["process_graph"].keys()) assert new_process_graph["lc"] == process["process_graph"]["lc"] assert new_process_graph["sr"] == { "process_id": "save_result", "arguments": {"data": {"from_node": new_node_id}, "format": "GTiff"}, - "result": True + "result": True, } assert new_process_graph[new_node_id]["process_id"] == "filter_bbox" assert new_process_graph[new_node_id]["arguments"]["data"] == {"from_node": "lc"} diff --git a/tests/partitionedjobs/test_tracking.py b/tests/partitionedjobs/test_tracking.py index e8b9d306..cc97a409 100644 --- a/tests/partitionedjobs/test_tracking.py +++ b/tests/partitionedjobs/test_tracking.py @@ -49,30 +49,27 @@ def dummy2(backend2, requests_mock, test_user) -> DummyBackend: class TestPartitionedJobTracker: - def test_create(self, pjob, zk_db, zk_tracker, flask_request, dummy1, dummy2, john): pjob_id = zk_tracker.create(pjob=pjob, user_id=john, flask_request=flask_request) - assert zk_db.get_pjob_status(user_id=john, pjob_id=pjob_id) == DictSubSet({ - "status": "created", - "message": approx_str_contains("{'created': 2}"), - }) + assert zk_db.get_pjob_status(user_id=john, pjob_id=pjob_id) == DictSubSet( + { + "status": "created", + "message": approx_str_contains("{'created': 2}"), + } + ) subjobs = zk_db.list_subjobs(user_id=john, pjob_id=pjob_id) assert set(subjobs.keys()) == {"0000", "0001"} for sjob_id in subjobs: - assert zk_db.get_sjob_status(user_id=john, pjob_id=pjob_id, sjob_id=sjob_id) == DictSubSet({ - "status": "created", - "message": approx_str_prefix("Created in 0:00"), - }) - assert dummy1.jobs[john, "1-jb-0"].create == { - "process": P12, - "title": approx_str_contains("part 
0000 (1/2)") - } + assert zk_db.get_sjob_status(user_id=john, pjob_id=pjob_id, sjob_id=sjob_id) == DictSubSet( + { + "status": "created", + "message": approx_str_prefix("Created in 0:00"), + } + ) + assert dummy1.jobs[john, "1-jb-0"].create == {"process": P12, "title": approx_str_contains("part 0000 (1/2)")} assert dummy1.jobs[john, "1-jb-0"].history == ["created"] - assert dummy2.jobs[john, "2-jb-0"].create == { - "process": P23, - "title": approx_str_contains("part 0001 (2/2)") - } + assert dummy2.jobs[john, "2-jb-0"].create == {"process": P23, "title": approx_str_contains("part 0001 (2/2)")} assert dummy2.jobs[john, "2-jb-0"].history == ["created"] def test_create_error(self, pjob, zk_db, zk_tracker, flask_request, dummy1, dummy2, requests_mock, john): @@ -103,24 +100,30 @@ def test_create_error(self, pjob, zk_db, zk_tracker, flask_request, dummy1, dumm def test_start(self, pjob, zk_db, zk_tracker, flask_request, dummy1, dummy2, john): # Create pjob_id = zk_tracker.create(pjob=pjob, user_id=john, flask_request=flask_request) - assert zk_db.get_pjob_status(user_id=john, pjob_id=pjob_id) == DictSubSet({ - "status": "created", - "message": approx_str_contains("{'created': 2}"), - }) + assert zk_db.get_pjob_status(user_id=john, pjob_id=pjob_id) == DictSubSet( + { + "status": "created", + "message": approx_str_contains("{'created': 2}"), + } + ) # Start zk_tracker.start_sjobs(pjob_id=pjob_id, user_id=john, flask_request=flask_request) - assert zk_db.get_pjob_status(user_id=john, pjob_id=pjob_id) == DictSubSet({ - "status": "running", - "message": approx_str_contains("{'running': 2}"), - }) + assert zk_db.get_pjob_status(user_id=john, pjob_id=pjob_id) == DictSubSet( + { + "status": "running", + "message": approx_str_contains("{'running': 2}"), + } + ) subjobs = zk_db.list_subjobs(user_id=john, pjob_id=pjob_id) assert set(subjobs.keys()) == {"0000", "0001"} for sjob_id in subjobs: - assert zk_db.get_sjob_status(user_id=john, pjob_id=pjob_id, sjob_id=sjob_id) == DictSubSet({ - "status": "running", - "message": approx_str_prefix("Started in 0:00"), - }) + assert zk_db.get_sjob_status(user_id=john, pjob_id=pjob_id, sjob_id=sjob_id) == DictSubSet( + { + "status": "running", + "message": approx_str_prefix("Started in 0:00"), + } + ) assert dummy1.jobs[john, "1-jb-0"].history == ["created", "running"] assert dummy2.jobs[john, "2-jb-0"].history == ["created", "running"] @@ -128,10 +131,12 @@ def test_start(self, pjob, zk_db, zk_tracker, flask_request, dummy1, dummy2, joh def test_start_wrong_user(self, pjob, zk_db, zk_tracker, flask_request, dummy1, dummy2, john): # Create pjob_id = zk_tracker.create(pjob=pjob, user_id=john, flask_request=flask_request) - assert zk_db.get_pjob_status(user_id=john, pjob_id=pjob_id) == DictSubSet({ - "status": "created", - "message": approx_str_contains("{'created': 2}"), - }) + assert zk_db.get_pjob_status(user_id=john, pjob_id=pjob_id) == DictSubSet( + { + "status": "created", + "message": approx_str_contains("{'created': 2}"), + } + ) with pytest.raises(JobNotFoundException): zk_tracker.start_sjobs(pjob_id=pjob_id, user_id="notjohn", flask_request=flask_request) @@ -285,10 +290,12 @@ def test_sync_with_error_and_recover(self, pjob, zk_db, zk_tracker, flask_reques dummy2.set_job_status(john, "2-jb-0", "error") zk_tracker.sync(pjob_id=pjob_id, user_id=john, flask_request=flask_request) - assert zk_db.get_pjob_status(user_id=john, pjob_id=pjob_id) == DictSubSet({ - "status": "running", - "message": approx_str_contains("{'running': 1, 'error': 1}"), - }) + assert 
+            {
+                "status": "running",
+                "message": approx_str_contains("{'running': 1, 'error': 1}"),
+            }
+        )
         subjobs = zk_db.list_subjobs(user_id=john, pjob_id=pjob_id)
         assert set(subjobs.keys()) == {"0000", "0001"}
         assert zk_db.get_sjob_status(user_id=john, pjob_id=pjob_id, sjob_id="0000") == DictSubSet({"status": "running"})
@@ -299,10 +306,12 @@ def test_sync_with_error_and_recover(self, pjob, zk_db, zk_tracker, flask_reques
         dummy2.set_job_status(john, "2-jb-0", "running")
         zk_tracker.sync(pjob_id=pjob_id, user_id=john, flask_request=flask_request)
 
-        assert zk_db.get_pjob_status(user_id=john, pjob_id=pjob_id) == DictSubSet({
-            "status": "running",
-            "message": approx_str_contains("{'running': 2}"),
-        })
+        assert zk_db.get_pjob_status(user_id=john, pjob_id=pjob_id) == DictSubSet(
+            {
+                "status": "running",
+                "message": approx_str_contains("{'running': 2}"),
+            }
+        )
         subjobs = zk_db.list_subjobs(user_id=john, pjob_id=pjob_id)
         assert set(subjobs.keys()) == {"0000", "0001"}
         assert zk_db.get_sjob_status(user_id=john, pjob_id=pjob_id, sjob_id="0000") == DictSubSet({"status": "running"})
@@ -325,7 +334,6 @@ def test_sync_wrong_user(self, pjob, zk_db, zk_tracker, flask_request, dummy1, d
 
 
 class TestPartitionedJobConnection:
-
     def test_authenticated_from_request(self, zk_tracker):
         con = PartitionedJobConnection(partitioned_job_tracker=zk_tracker)
         assert con._flask_request is None
diff --git a/tests/partitionedjobs/test_zookeeper.py b/tests/partitionedjobs/test_zookeeper.py
index 923e044d..08348eae 100644
--- a/tests/partitionedjobs/test_zookeeper.py
+++ b/tests/partitionedjobs/test_zookeeper.py
@@ -18,7 +18,6 @@
 @clock_mock("2022-01-17T17:48:00Z")
 class TestZooKeeperPartitionedJobDB:
-
     def test_connect_context_manager_basic(self):
         client = mock.Mock()
         zk_db = ZooKeeperPartitionedJobDB(client=client)
@@ -73,7 +72,7 @@ def test_insert_basic(self, pjob, zk_client, zk_db):
             "/o-a/tstsr/pj-20220117-174800/sjobs/0000": {
                 "process_graph": PG12,
                 "backend_id": "b1",
-                "title": "Partitioned job pj-20220117-174800 part 0000 (1/2)"
+                "title": "Partitioned job pj-20220117-174800 part 0000 (1/2)",
             },
             "/o-a/tstsr/pj-20220117-174800/sjobs/0000/status": {
                 "status": "inserted",
@@ -83,7 +82,7 @@ def test_insert_basic(self, pjob, zk_client, zk_db):
             "/o-a/tstsr/pj-20220117-174800/sjobs/0001": {
                 "process_graph": PG23,
                 "backend_id": "b2",
-                "title": "Partitioned job pj-20220117-174800 part 0001 (2/2)"
+                "title": "Partitioned job pj-20220117-174800 part 0001 (2/2)",
             },
             "/o-a/tstsr/pj-20220117-174800/sjobs/0001/status": {
                 "status": "inserted",
@@ -127,12 +126,12 @@ def test_list_subjobs(self, pjob, zk_db):
             "0000": {
                 "process_graph": PG12,
                 "backend_id": "b1",
-                "title": "Partitioned job pj-20220117-174800 part 0000 (1/2)"
+                "title": "Partitioned job pj-20220117-174800 part 0000 (1/2)",
             },
             "0001": {
                 "process_graph": PG23,
                 "backend_id": "b2",
-                "title": "Partitioned job pj-20220117-174800 part 0001 (2/2)"
+                "title": "Partitioned job pj-20220117-174800 part 0001 (2/2)",
             },
         }
@@ -160,8 +159,7 @@ def test_set_get_pjob_status(self, pjob, zk_db):
         assert status == {"status": "running", "message": "goin' on", "timestamp": approx_now(), "progress": None}
 
         zk_db.set_pjob_status(
-            user_id=TEST_USER, pjob_id="pj-20220117-174800", status="running", message="goin' on",
-            progress=45
+            user_id=TEST_USER, pjob_id="pj-20220117-174800", status="running", message="goin' on", progress=45
         )
         status = zk_db.get_pjob_status(user_id=TEST_USER, pjob_id="pj-20220117-174800")
         assert status == {"status": "running", "message": "goin' on", "timestamp": approx_now(), "progress": 45}
{"status": "running", "message": "goin' on", "timestamp": approx_now(), "progress": 45} @@ -176,8 +174,7 @@ def test_set_get_sjob_status(self, pjob, zk_db): assert status == {"status": "inserted", "message": None, "timestamp": approx_now()} zk_db.set_sjob_status( - user_id=TEST_USER, pjob_id="pj-20220117-174800", sjob_id="0000", status="running", - message="goin' on" + user_id=TEST_USER, pjob_id="pj-20220117-174800", sjob_id="0000", status="running", message="goin' on" ) status = zk_db.get_sjob_status(user_id=TEST_USER, pjob_id="pj-20220117-174800", sjob_id="0000") assert status == {"status": "running", "message": "goin' on", "timestamp": approx_now()} diff --git a/tests/test_backend.py b/tests/test_backend.py index 7f388f38..c5ebe76e 100644 --- a/tests/test_backend.py +++ b/tests/test_backend.py @@ -59,7 +59,7 @@ def test_oidc_providers_new_config_support(self, multi_backend_connection, backe def test_file_formats_simple(self, multi_backend_connection, backend1, backend2, requests_mock): just_geotiff = { "input": {"GTiff": {"gis_data_types": ["raster"], "parameters": {}, "title": "GeoTiff"}}, - "output": {"GTiff": {"gis_data_types": ["raster"], "parameters": {}, "title": "GeoTiff"}} + "output": {"GTiff": {"gis_data_types": ["raster"], "parameters": {}, "title": "GeoTiff"}}, } requests_mock.get(backend1 + "/file_formats", json=just_geotiff) requests_mock.get(backend2 + "/file_formats", json=just_geotiff) @@ -79,7 +79,7 @@ def test_file_formats_caching( ): just_geotiff = { "input": {"GTiff": {"gis_data_types": ["raster"], "parameters": {}, "title": "GeoTiff"}}, - "output": {"GTiff": {"gis_data_types": ["raster"], "parameters": {}, "title": "GeoTiff"}} + "output": {"GTiff": {"gis_data_types": ["raster"], "parameters": {}, "title": "GeoTiff"}}, } mock1 = requests_mock.get(backend1 + "/file_formats", json=just_geotiff) mock2 = requests_mock.get(backend2 + "/file_formats", json=just_geotiff) @@ -147,12 +147,14 @@ def test_file_formats_merging(self, multi_backend_connection, backend1, backend2 "GTiff": { "gis_data_types": ["raster"], # TODO: merge parameters of backend1 and backend2? 
- "parameters": {"ZLEVEL": {"type": "string", "default": "6"}, }, - "title": "GeoTiff" + "parameters": { + "ZLEVEL": {"type": "string", "default": "6"}, + }, + "title": "GeoTiff", }, "JSON": {"gis_data_types": ["raster"], "parameters": {}}, "netCDF": {"gis_data_types": ["raster"], "parameters": {}, "title": "netCDF"}, - } + }, } @@ -166,9 +168,8 @@ class TestAggregatorSecondaryServices: "configuration": { "colormap": { "default": "YlGn", - "description": - "The colormap to apply to single band layers", - "type": "string" + "description": "The colormap to apply to single band layers", + "type": "string", }, "version": { "default": "1.0.0", @@ -340,20 +341,19 @@ def test_service_types_multiple_backends( "configuration": { "colormap": { "default": "YlGn", - "description": - "The colormap to apply to single band layers", - "type": "string" + "description": "The colormap to apply to single band layers", + "type": "string", }, "version": { "default": "1.0.0", "description": "The WMTS version to use.", "enum": ["1.0.0"], - "type": "string" - } + "type": "string", + }, }, "links": [], "process_parameters": [], - "title": "Web Map Tile Service" + "title": "Web Map Tile Service", } } service_type_2 = { @@ -392,19 +392,14 @@ def test_service_types_warns_about_duplicate_service( requests_mock.get(backend1 + "/", json=mbldr.capabilities(secondary_services=True)) requests_mock.get(backend2 + "/", json=mbldr.capabilities(secondary_services=True)) service_type_1 = { - "WMS": { - "title": "OGC Web Map Service", - "configuration": {}, - "process_parameters": [], - "links": [] - } + "WMS": {"title": "OGC Web Map Service", "configuration": {}, "process_parameters": [], "links": []} } service_type_2 = { "WMS": { "title": "A duplicate OGC Web Map Service", "configuration": {}, "process_parameters": [], - "links": [] + "links": [], } } requests_mock.get(backend1 + "/service_types", json=service_type_1) @@ -421,8 +416,8 @@ def test_service_types_warns_about_duplicate_service( assert actual_service_types == expected_service_types expected_log_message = ( - 'Conflicting secondary service types: "WMS" is present in more than one backend, ' + - 'already found in backend: b1' + 'Conflicting secondary service types: "WMS" is present in more than one backend, ' + + "already found in backend: b1" ) assert expected_log_message in caplog.text @@ -431,13 +426,13 @@ def service_metadata_wmts_foo(self): return ServiceMetadata( id="wmts-foo", process={"process_graph": {"foo": {"process_id": "foo", "arguments": {}}}}, - url='https://oeo.net/wmts/foo', + url="https://oeo.net/wmts/foo", type="WMTS", enabled=True, configuration={"version": "0.5.8"}, attributes={}, title="Test WMTS service", - created=dt.datetime(2020, 4, 9, 15, 5, 8) + created=dt.datetime(2020, 4, 9, 15, 5, 8), ) @pytest.fixture @@ -548,11 +543,8 @@ def test_create_service_succeeds( process_graph = {"foo": {"process_id": "foo", "arguments": {}}} requests_mock.post( backend1 + "/services", - headers={ - "OpenEO-Identifier": backend_service_id, - "Location": location_backend_1 - }, - status_code=201 + headers={"OpenEO-Identifier": backend_service_id, "Location": location_backend_1}, + status_code=201, ) requests_mock.get(backend1 + "/service_types", json=self.SERVICE_TYPES_ONLT_WMTS) @@ -593,11 +585,8 @@ def test_create_service_raises_serviceunsupportedexception( process_graph = {"foo": {"process_id": "foo", "arguments": {}}} mock_post = requests_mock.post( backend1 + "/services", - headers={ - "OpenEO-Identifier": "wmts-foo", - "Location": location_backend_1 - }, - 
-            status_code=201
+            headers={"OpenEO-Identifier": "wmts-foo", "Location": location_backend_1},
+            status_code=201,
         )
         processing = AggregatorProcessing(backends=multi_backend_connection, catalog=catalog)
         implementation = AggregatorSecondaryServices(backends=multi_backend_connection, processing=processing)
@@ -609,7 +598,7 @@ def test_create_service_raises_serviceunsupportedexception(
                 process_graph=process_graph,
                 service_type="does-not-exist",
                 api_version="1.0.0",
-                configuration={}
+                configuration={},
             )
         assert not mock_post.called
         # The backend that we are using should support the GET /service_types endpoint,
@@ -858,7 +847,6 @@ def test_update_service_backend_response_is_an_error_status(
 
 
 class TestInternalCollectionMetadata:
-
     def test_get_set_backends_for_collection(self):
         internal = _InternalCollectionMetadata()
         internal.set_backends_for_collection("S2", ["b1", "b3"])
@@ -880,7 +868,6 @@ def test_list_backends_for_collections(self):
 @pytest.mark.usefixtures("flask_app")  # Automatically enter flask app context for `url_for` to work
 class TestAggregatorCollectionCatalog:
-
     def test_get_all_metadata_simple(self, catalog, backend1, backend2, requests_mock):
         requests_mock.get(backend1 + "/collections", json={"collections": [{"id": "S2"}]})
         requests_mock.get(backend2 + "/collections", json={"collections": [{"id": "S3"}]})
@@ -896,9 +883,7 @@ def test_get_all_metadata_simple(self, catalog, backend1, backend2, requests_moc
             },
         ]
 
-    def test_get_all_metadata_common_collections_minimal(
-        self, catalog, backend1, backend2, requests_mock
-    ):
+    def test_get_all_metadata_common_collections_minimal(self, catalog, backend1, backend2, requests_mock):
         requests_mock.get(
             backend1 + "/collections",
             json={"collections": [{"id": "S3"}, {"id": "S4"}]},
@@ -935,9 +920,7 @@ def test_get_all_metadata_common_collections_minimal(
             },
         ]
 
-    def test_get_all_metadata_common_collections_merging(
-        self, catalog, backend1, backend2, requests_mock
-    ):
+    def test_get_all_metadata_common_collections_merging(self, catalog, backend1, backend2, requests_mock):
         requests_mock.get(
             backend1 + "/collections",
             json={
@@ -953,11 +936,7 @@ def test_get_all_metadata_common_collections_merging(
                         "providers": [{"name": "ESA", "roles": ["producer"]}],
                         "extent": {
                             "spatial": {"bbox": [[-10, 20, 30, 50]]},
-                            "temporal": {
-                                "interval": [
-                                    ["2011-01-01T00:00:00Z", "2019-01-01T00:00:00Z"]
-                                ]
-                            },
+                            "temporal": {"interval": [["2011-01-01T00:00:00Z", "2019-01-01T00:00:00Z"]]},
                         },
                         "cube:dimensions": {
                             "bands": {"type": "bands", "values": ["B01", "B02"]},
@@ -988,11 +967,7 @@ def test_get_all_metadata_common_collections_merging(
                         "providers": [{"name": "ESA", "roles": ["licensor"]}],
                         "extent": {
                             "spatial": {"bbox": [[-20, -20, 40, 40]]},
-                            "temporal": {
-                                "interval": [
-                                    ["2012-02-02T00:00:00Z", "2019-01-01T00:00:00Z"]
-                                ]
-                            },
+                            "temporal": {"interval": [["2012-02-02T00:00:00Z", "2019-01-01T00:00:00Z"]]},
                         },
                         "cube:dimensions": {
                             "bands": {"type": "bands", "values": ["B01", "B02"]},
@@ -1051,9 +1026,7 @@ def test_get_all_metadata_common_collections_merging(
             },
         ]
 
-    def test_get_best_backend_for_collections_basic(
-        self, catalog, backend1, backend2, requests_mock
-    ):
+    def test_get_best_backend_for_collections_basic(self, catalog, backend1, backend2, requests_mock):
         requests_mock.get(
             backend1 + "/collections",
             json={"collections": [{"id": "S3"}, {"id": "S4"}]},
@@ -1070,47 +1043,33 @@ def test_get_best_backend_for_collections_basic(
         assert catalog.get_backend_candidates_for_collections(["S3", "S4"]) == ["b1"]
         assert catalog.get_backend_candidates_for_collections(["S4", "S5"]) == ["b2"]
catalog.get_backend_candidates_for_collections(["S4", "S5"]) == ["b2"] - with pytest.raises( - OpenEOApiException, match="Collections across multiple backends" - ): + with pytest.raises(OpenEOApiException, match="Collections across multiple backends"): catalog.get_backend_candidates_for_collections(["S3", "S4", "S5"]) - def test_get_collection_metadata_basic( - self, catalog, backend1, backend2, requests_mock - ): - requests_mock.get( - backend1 + "/collections", json={"collections": [{"id": "S2"}]} - ) - requests_mock.get( - backend1 + "/collections/S2", json={"id": "S2", "title": "b1's S2"} - ) - requests_mock.get( - backend2 + "/collections", json={"collections": [{"id": "S3"}]} - ) - requests_mock.get( - backend2 + "/collections/S3", json={"id": "S3", "title": "b2's S3"} - ) + def test_get_collection_metadata_basic(self, catalog, backend1, backend2, requests_mock): + requests_mock.get(backend1 + "/collections", json={"collections": [{"id": "S2"}]}) + requests_mock.get(backend1 + "/collections/S2", json={"id": "S2", "title": "b1's S2"}) + requests_mock.get(backend2 + "/collections", json={"collections": [{"id": "S3"}]}) + requests_mock.get(backend2 + "/collections/S3", json={"id": "S3", "title": "b2's S3"}) metadata = catalog.get_collection_metadata("S2") assert metadata == { - 'id': 'S2', 'title': "b1's S2", + "id": "S2", + "title": "b1's S2", "summaries": {"federation:backends": ["b1"]}, } metadata = catalog.get_collection_metadata("S3") assert metadata == { - "id": "S3", "title": "b2's S3", + "id": "S3", + "title": "b2's S3", "summaries": {"federation:backends": ["b2"]}, } with pytest.raises(CollectionNotFoundException): catalog.get_collection_metadata("S5") - def test_get_collection_metadata_merging( - self, catalog, backend1, backend2, requests_mock - ): - requests_mock.get( - backend1 + "/collections", json={"collections": [{"id": "S2"}]} - ) + def test_get_collection_metadata_merging(self, catalog, backend1, backend2, requests_mock): + requests_mock.get(backend1 + "/collections", json={"collections": [{"id": "S2"}]}) requests_mock.get( backend1 + "/collections/S2", json={ @@ -1133,9 +1092,7 @@ def test_get_collection_metadata_merging( "datasource_type": "1sentinel-2-grd", }, ) - requests_mock.get( - backend2 + "/collections", json={"collections": [{"id": "S2"}]} - ) + requests_mock.get(backend2 + "/collections", json={"collections": [{"id": "S2"}]}) requests_mock.get( backend2 + "/collections/S2", json={ @@ -1202,12 +1159,8 @@ def test_get_collection_metadata_merging( "sci:citation": "Modified Copernicus Sentinel data [Year]/Sentinel Hub", } - def test_get_collection_metadata_merging_summaries( - self, catalog, backend1, backend2, requests_mock - ): - requests_mock.get( - backend1 + "/collections", json={"collections": [{"id": "S2"}]} - ) + def test_get_collection_metadata_merging_summaries(self, catalog, backend1, backend2, requests_mock): + requests_mock.get(backend1 + "/collections", json={"collections": [{"id": "S2"}]}) requests_mock.get( backend1 + "/collections/S2", json={ @@ -1235,9 +1188,7 @@ def test_get_collection_metadata_merging_summaries( }, }, ) - requests_mock.get( - backend2 + "/collections", json={"collections": [{"id": "S2"}]} - ) + requests_mock.get(backend2 + "/collections", json={"collections": [{"id": "S2"}]}) requests_mock.get( backend2 + "/collections/S2", json={ @@ -1307,12 +1258,8 @@ def test_get_collection_metadata_merging_summaries( "type": "Collection", } - def test_get_collection_metadata_merging_extent( - self, catalog, backend1, backend2, 
requests_mock - ): - requests_mock.get( - backend1 + "/collections", json={"collections": [{"id": "S2"}]} - ) + def test_get_collection_metadata_merging_extent(self, catalog, backend1, backend2, requests_mock): + requests_mock.get(backend1 + "/collections", json={"collections": [{"id": "S2"}]}) requests_mock.get( backend1 + "/collections/S2", json={ @@ -1323,9 +1270,7 @@ def test_get_collection_metadata_merging_extent( }, }, ) - requests_mock.get( - backend2 + "/collections", json={"collections": [{"id": "S2"}]} - ) + requests_mock.get(backend2 + "/collections", json={"collections": [{"id": "S2"}]}) requests_mock.get( backend2 + "/collections/S2", json={ @@ -1362,12 +1307,8 @@ def test_get_collection_metadata_merging_extent( }, } - def test_get_collection_metadata_merging_links( - self, catalog, backend1, backend2, requests_mock - ): - requests_mock.get( - backend1 + "/collections", json={"collections": [{"id": "S2"}]} - ) + def test_get_collection_metadata_merging_links(self, catalog, backend1, backend2, requests_mock): + requests_mock.get(backend1 + "/collections", json={"collections": [{"id": "S2"}]}) requests_mock.get( backend1 + "/collections/S2", json={ @@ -1389,9 +1330,7 @@ def test_get_collection_metadata_merging_links( ], }, ) - requests_mock.get( - backend2 + "/collections", json={"collections": [{"id": "S2"}]} - ) + requests_mock.get(backend2 + "/collections", json={"collections": [{"id": "S2"}]}) requests_mock.get( backend2 + "/collections/S2", json={ @@ -1434,12 +1373,8 @@ def test_get_collection_metadata_merging_links( }, } - def test_get_collection_metadata_merging_removes_duplicate_links( - self, catalog, backend1, backend2, requests_mock - ): - requests_mock.get( - backend1 + "/collections", json={"collections": [{"id": "S2"}]} - ) + def test_get_collection_metadata_merging_removes_duplicate_links(self, catalog, backend1, backend2, requests_mock): + requests_mock.get(backend1 + "/collections", json={"collections": [{"id": "S2"}]}) requests_mock.get( backend1 + "/collections/S2", json={ @@ -1462,9 +1397,7 @@ def test_get_collection_metadata_merging_removes_duplicate_links( ], }, ) - requests_mock.get( - backend2 + "/collections", json={"collections": [{"id": "S2"}]} - ) + requests_mock.get(backend2 + "/collections", json={"collections": [{"id": "S2"}]}) requests_mock.get( backend2 + "/collections/S2", json={ @@ -1510,12 +1443,8 @@ def test_get_collection_metadata_merging_removes_duplicate_links( }, } - def test_get_collection_metadata_merging_cubedimensions( - self, catalog, backend1, backend2, requests_mock - ): - requests_mock.get( - backend1 + "/collections", json={"collections": [{"id": "S2"}]} - ) + def test_get_collection_metadata_merging_cubedimensions(self, catalog, backend1, backend2, requests_mock): + requests_mock.get(backend1 + "/collections", json={"collections": [{"id": "S2"}]}) b1_bands = ["VV", "VH", "HV", "HH"] requests_mock.get( backend1 + "/collections/S2", @@ -1546,9 +1475,7 @@ def test_get_collection_metadata_merging_cubedimensions( "summaries": {"eo:bands": [{"name": b} for b in b1_bands]}, }, ) - requests_mock.get( - backend2 + "/collections", json={"collections": [{"id": "S2"}]} - ) + requests_mock.get(backend2 + "/collections", json={"collections": [{"id": "S2"}]}) b2_bands = ["VV", "VH", "HH", "HH+HV", "VV+VH", "HV"] requests_mock.get( backend2 + "/collections/S2", @@ -1649,9 +1576,7 @@ def test_get_collection_metadata_merging_bands_prefix( b2_bands, expected_bands, ): - requests_mock.get( - backend1 + "/collections", json={"collections": [{"id": 
"S2"}]} - ) + requests_mock.get(backend1 + "/collections", json={"collections": [{"id": "S2"}]}) requests_mock.get( backend1 + "/collections/S2", json={ @@ -1662,9 +1587,7 @@ def test_get_collection_metadata_merging_bands_prefix( "summaries": {"eo:bands": [{"name": b} for b in b1_bands]}, }, ) - requests_mock.get( - backend2 + "/collections", json={"collections": [{"id": "S2"}]} - ) + requests_mock.get(backend2 + "/collections", json={"collections": [{"id": "S2"}]}) requests_mock.get( backend2 + "/collections/S2", json={ @@ -1701,19 +1624,11 @@ def test_get_collection_metadata_merging_bands_prefix( }, } - def test_get_collection_metadata_merging_with_error( - self, catalog, backend1, backend2, requests_mock - ): - requests_mock.get( - backend1 + "/collections", json={"collections": [{"id": "S2"}]} - ) + def test_get_collection_metadata_merging_with_error(self, catalog, backend1, backend2, requests_mock): + requests_mock.get(backend1 + "/collections", json={"collections": [{"id": "S2"}]}) requests_mock.get(backend1 + "/collections/S2", status_code=500) - requests_mock.get( - backend2 + "/collections", json={"collections": [{"id": "S2"}]} - ) - requests_mock.get( - backend2 + "/collections/S2", json={"id": "S2", "title": "b2's S2"} - ) + requests_mock.get(backend2 + "/collections", json={"collections": [{"id": "S2"}]}) + requests_mock.get(backend2 + "/collections/S2", json={"id": "S2", "title": "b2's S2"}) metadata = catalog.get_collection_metadata("S2") assert metadata == { @@ -1726,10 +1641,24 @@ def test_get_collection_metadata_merging_with_error( # TODO tests for caching of collection metadata def test_generate_backend_constraint_callables(self): - callables = AggregatorCollectionCatalog.generate_backend_constraint_callables([ - {"eq": {"process_id": "eq", "arguments": {"x": {"from_parameter": "value"}, "y": "b1"}, "result": True}}, - {"eq": {"process_id": "neq", "arguments": {"x": {"from_parameter": "value"}, "y": "b2"}, "result": True}}, - ]) + callables = AggregatorCollectionCatalog.generate_backend_constraint_callables( + [ + { + "eq": { + "process_id": "eq", + "arguments": {"x": {"from_parameter": "value"}, "y": "b1"}, + "result": True, + } + }, + { + "eq": { + "process_id": "neq", + "arguments": {"x": {"from_parameter": "value"}, "y": "b2"}, + "result": True, + } + }, + ] + ) equals_b1, differs_from_b2 = callables assert equals_b1("b1") is True assert equals_b1("b2") is False @@ -1790,19 +1719,18 @@ def test_get_collection_metadata_caching( with config_overrides(**overrides): catalog = AggregatorCollectionCatalog(backends=multi_backend_connection) - metadata = catalog.get_collection_metadata("S2") - assert metadata == DictSubSet({'id': 'S2', 'title': "b1's S2"}) + assert metadata == DictSubSet({"id": "S2", "title": "b1's S2"}) assert (b1s2.call_count, b2s2.call_count) == (1, 1) with clock_mock(offset=10): metadata = catalog.get_collection_metadata("S2") - assert metadata == DictSubSet({'id': 'S2', 'title': "b1's S2"}) + assert metadata == DictSubSet({"id": "S2", "title": "b1's S2"}) assert (b1s2.call_count, b2s2.call_count) == (1, 1) with clock_mock(offset=100): metadata = catalog.get_collection_metadata("S2") - assert metadata == DictSubSet({'id': 'S2', 'title': "b1's S2"}) + assert metadata == DictSubSet({"id": "S2", "title": "b1's S2"}) assert (b1s2.call_count, b2s2.call_count) == (2, 2) assert isinstance(catalog._memoizer, DictMemoizer) @@ -1811,11 +1739,10 @@ def test_get_collection_metadata_caching( class TestJobIdMapping: - def test_get_aggregator_job_id(self): - assert 
JobIdMapping.get_aggregator_job_id( - backend_job_id="j0bId-f00b6r", backend_id="vito" - ) == "vito-j0bId-f00b6r" + assert ( + JobIdMapping.get_aggregator_job_id(backend_job_id="j0bId-f00b6r", backend_id="vito") == "vito-j0bId-f00b6r" + ) def test_parse_aggregator_job_id(self, multi_backend_connection): assert JobIdMapping.parse_aggregator_job_id( @@ -1836,11 +1763,11 @@ def test_parse_aggregator_job_id_fail(self, multi_backend_connection): class TestServiceIdMapping: - def test_get_aggregator_job_id(self): - assert ServiceIdMapping.get_aggregator_service_id( - backend_service_id="service-x17-abc", backend_id="vito" - ) == "vito-service-x17-abc" + assert ( + ServiceIdMapping.get_aggregator_service_id(backend_service_id="service-x17-abc", backend_id="vito") + == "vito-service-x17-abc" + ) def test_parse_aggregator_job_id(self, multi_backend_connection): assert ServiceIdMapping.parse_aggregator_service_id( @@ -1908,10 +1835,10 @@ def test_get_process_registry( ], "returns": {"schema": {}}, "federation:backends": ["b1"], - 'deprecated': False, - 'experimental': False, - 'examples': [], - 'links': [] + "deprecated": False, + "experimental": False, + "examples": [], + "links": [], }, { "id": "mean", @@ -1919,10 +1846,10 @@ def test_get_process_registry( "parameters": [{"name": "data", "schema": {}, "description": "data"}], "returns": {"schema": {}}, "federation:backends": ["b1", "b2"], - 'deprecated': False, - 'experimental': False, - 'examples': [], - 'links': [] + "deprecated": False, + "experimental": False, + "examples": [], + "links": [], }, { "id": "multiply", @@ -1933,10 +1860,10 @@ def test_get_process_registry( ], "returns": {"schema": {}}, "federation:backends": ["b2"], - 'deprecated': False, - 'experimental': False, - 'examples': [], - 'links': [] + "deprecated": False, + "experimental": False, + "examples": [], + "links": [], }, ] @@ -1950,12 +1877,22 @@ def test_get_process_registry( def test_get_process_registry_caching( self, multi_backend_connection, backend1, backend2, requests_mock, overrides, expected_cache_types ): - b1p = requests_mock.get(backend1 + "/processes", json={"processes": [ - {"id": "add", "parameters": [{"name": "x"}, {"name": "y"}]}, - ]}) - b2p = requests_mock.get(backend2 + "/processes", json={"processes": [ - {"id": "multiply", "parameters": [{"name": "x"}, {"name": "y"}]}, - ]}) + b1p = requests_mock.get( + backend1 + "/processes", + json={ + "processes": [ + {"id": "add", "parameters": [{"name": "x"}, {"name": "y"}]}, + ] + }, + ) + b2p = requests_mock.get( + backend2 + "/processes", + json={ + "processes": [ + {"id": "multiply", "parameters": [{"name": "x"}, {"name": "y"}]}, + ] + }, + ) with config_overrides(**overrides): catalog = AggregatorCollectionCatalog(backends=multi_backend_connection) @@ -2028,10 +1965,10 @@ def test_get_process_registry_parameter_differences( ], "returns": {"schema": {}}, "federation:backends": ["b1"], - 'deprecated': False, - 'experimental': False, - 'examples': [], - 'links': [] + "deprecated": False, + "experimental": False, + "examples": [], + "links": [], }, { "id": "mean", @@ -2039,10 +1976,10 @@ def test_get_process_registry_parameter_differences( "parameters": [{"name": "array", "schema": {}, "description": "array"}], "returns": {"schema": {}}, "federation:backends": ["b1", "b2"], - 'deprecated': False, - 'experimental': False, - 'examples': [], - 'links': [] + "deprecated": False, + "experimental": False, + "examples": [], + "links": [], }, { "id": "multiply", @@ -2053,9 +1990,9 @@ def 
                 ],
                 "returns": {"schema": {}},
                 "federation:backends": ["b2"],
-                'deprecated': False,
-                'experimental': False,
-                'examples': [],
-                'links': []
+                "deprecated": False,
+                "experimental": False,
+                "examples": [],
+                "links": [],
             },
         ]
diff --git a/tests/test_caching.py b/tests/test_caching.py
index 84e4650e..180f9647 100644
--- a/tests/test_caching.py
+++ b/tests/test_caching.py
@@ -37,7 +37,6 @@ def __call__(self):
 
 
 class TestTtlCache:
-
     def test_basic(self):
         cache = TtlCache()
         cache.set("foo", "bar")
@@ -138,7 +137,6 @@ def callback():
 
 
 class TestNullMemoizer(_TestMemoizer):
-
     def test_basic(self):
         cache = NullMemoizer()
         callback = self._build_callback()
@@ -147,11 +145,13 @@ def test_basic(self):
 
 
 class TestJsonSerde:
-
-    @pytest.mark.parametrize(["value", "serialized"], [
-        ({"foo": 123}, b'{"foo":123}'),
-        ({"foo": [1, 2, 3]}, b'{"foo":[1,2,3]}'),
-    ])
+    @pytest.mark.parametrize(
+        ["value", "serialized"],
+        [
+            ({"foo": 123}, b'{"foo":123}'),
+            ({"foo": [1, 2, 3]}, b'{"foo":[1,2,3]}'),
+        ],
+    )
     def test_default(self, value, serialized):
         serde = JsonSerDe()
         assert serde.serialize(value) == serialized
@@ -186,10 +186,7 @@ def __jsonserde_load__(cls, data: dict):
     def test_global_json_serde(self):
         icm = _InternalCollectionMetadata()
         icm.set_backends_for_collection(cid="S2", backends=["b5", "b9"])
-        data = {
-            "color": "green",
-            "icm": icm
-        }
+        data = {"color": "green", "icm": icm}
 
         serialized = json_serde.serialize(data)
         assert isinstance(serialized, bytes)
@@ -271,14 +268,13 @@ def count():
 
 
 class TestJsonDictMemoizer(TestDictMemoizer):
-
     def test_json_coercion(self):
         cache = JsonDictMemoizer()
         callback = lambda: {"ids": [1, 2, 3], "size": (4, 5, 6)}
-        assert cache.get_or_call(key="data", callback=callback) == {'ids': [1, 2, 3], 'size': (4, 5, 6)}
+        assert cache.get_or_call(key="data", callback=callback) == {"ids": [1, 2, 3], "size": (4, 5, 6)}
 
         # This is expected: tuple is not supported in JSON and silently converted to list
-        assert cache.get_or_call(key="data", callback=callback) == {'ids': [1, 2, 3], 'size': [4, 5, 6]}
+        assert cache.get_or_call(key="data", callback=callback) == {"ids": [1, 2, 3], "size": [4, 5, 6]}
 
     def test_json_encode_error(self, caplog):
         caplog.set_level(logging.ERROR)
@@ -301,7 +297,6 @@ def __init__(self):
 
 
 class TestChainedMemoizer(_TestMemoizer):
-
     def test_empty(self):
         cache = ChainedMemoizer(memoizers=[])
         callback = self._build_callback()
@@ -409,9 +404,7 @@ def test_failing_callback(self, caplog):
 
         assert dm2.get_or_call(key="count", callback=callback) == 1003
 
-
 class TestZkMemoizer(_TestMemoizer):
-
     @pytest.fixture
     def zk_client(self) -> mock.Mock:
         """Simple ad-hoc ZooKeeper client fixture using a dictionary for storage."""
@@ -463,26 +456,35 @@ def test_basic(self, zk_client):
         callback = self._build_callback()
         zk_client.get.assert_not_called()
         assert zk_cache.get_or_call(key="count", callback=callback) == 100
-        zk_client.get.assert_called_once_with(path='/test/count')
-
-    @pytest.mark.parametrize(["side_effects", "expected_error"], [
-        ({"start": RuntimeError}, "failed to start connection"),
-        ({"start": kazoo.exceptions.KazooException}, "failed to start connection"),
-        ({"get": RuntimeError}, "unexpected get failure"),
-        ({"get": kazoo.exceptions.KazooException}, "unexpected get failure"),
-        ({"create": RuntimeError}, "failed to create path '/test/count'"),
-        ({"create": kazoo.exceptions.KazooException}, "failed to create path '/test/count'"),
-        ({
-            "get": (lambda *arg, **kwargs: ('{"foo":"bar"}', DummyZnodeStat(last_modified=123))),
-            "set": RuntimeError,
-        }, "failed to set path '/test/count'"),
-        ({
-            "get": (lambda *arg, **kwargs: ('{"foo":"bar"}', DummyZnodeStat(last_modified=123))),
-            "set": kazoo.exceptions.KazooException,
-        }, "failed to set path '/test/count'"),
-        ({"stop": RuntimeError}, "failed to stop connection"),
-        ({"stop": kazoo.exceptions.KazooException}, "failed to stop connection"),
-    ])
+        zk_client.get.assert_called_once_with(path="/test/count")
+
+    @pytest.mark.parametrize(
+        ["side_effects", "expected_error"],
+        [
+            ({"start": RuntimeError}, "failed to start connection"),
+            ({"start": kazoo.exceptions.KazooException}, "failed to start connection"),
+            ({"get": RuntimeError}, "unexpected get failure"),
+            ({"get": kazoo.exceptions.KazooException}, "unexpected get failure"),
+            ({"create": RuntimeError}, "failed to create path '/test/count'"),
+            ({"create": kazoo.exceptions.KazooException}, "failed to create path '/test/count'"),
+            (
+                {
+                    "get": (lambda *arg, **kwargs: ('{"foo":"bar"}', DummyZnodeStat(last_modified=123))),
+                    "set": RuntimeError,
+                },
+                "failed to set path '/test/count'",
+            ),
+            (
+                {
+                    "get": (lambda *arg, **kwargs: ('{"foo":"bar"}', DummyZnodeStat(last_modified=123))),
+                    "set": kazoo.exceptions.KazooException,
+                },
+                "failed to set path '/test/count'",
+            ),
+            ({"stop": RuntimeError}, "failed to stop connection"),
+            ({"stop": kazoo.exceptions.KazooException}, "failed to stop connection"),
+        ],
+    )
     def test_broken_client(self, caplog, side_effects, expected_error):
         """Test that callback keeps working if ZooKeeper client is broken"""
         caplog.set_level(logging.ERROR)
@@ -561,15 +563,18 @@ def test_invalidate(self, zk_client, caplog):
         with clock_mock(3000):
             assert zk_cache.get_or_call(key="count", callback=callback) == 102
 
-    @pytest.mark.parametrize(["prefix", "key", "path"], [
-        ("test", "count", "/test/count"),
-        ("/test/", "/count/", "/test/count"),
-        ("test/v1", "user/count/", "/test/v1/user/count"),
-        ("test", ("count",), "/test/count"),
-        ("test", ("user", "count"), "/test/user/count"),
-        ("test", ("v1", "user", "count", "today"), "/test/v1/user/count/today"),
-        ("test", ["v1", "user", "count", "today"], "/test/v1/user/count/today"),
-    ])
+    @pytest.mark.parametrize(
+        ["prefix", "key", "path"],
+        [
+            ("test", "count", "/test/count"),
+            ("/test/", "/count/", "/test/count"),
+            ("test/v1", "user/count/", "/test/v1/user/count"),
+            ("test", ("count",), "/test/count"),
+            ("test", ("user", "count"), "/test/user/count"),
+            ("test", ("v1", "user", "count", "today"), "/test/v1/user/count/today"),
+            ("test", ["v1", "user", "count", "today"], "/test/v1/user/count/today"),
+        ],
+    )
     def test_key_to_path(self, zk_client, prefix, key, path):
         zk_cache = ZkMemoizer(client=zk_client, path_prefix=prefix)
         callback = self._build_callback()
@@ -599,13 +604,16 @@ def test_create_on_existing_node(self, zk_client, caplog):
         assert zk_cache.get_or_call(key="count", callback=callback) == 100
         assert "failed to create node '/test/count': already exists." in caplog.text
 
-    @pytest.mark.parametrize(["data", "expected_prefix"], [
-        (1234, b"1234"),
-        ("just a string", b'"just'),
-        ({"color": "green", "sizes": [1, 2, 3]}, b'{"color":"green'),
-        ([{"id": 3}, {"id": 99}], b'[{"id":'),
-        ([123, None, False, True, "foo"], b'[123'),
-    ])
+    @pytest.mark.parametrize(
+        ["data", "expected_prefix"],
+        [
+            (1234, b"1234"),
+            ("just a string", b'"just'),
+            ({"color": "green", "sizes": [1, 2, 3]}, b'{"color":"green'),
+            ([{"id": 3}, {"id": 99}], b'[{"id":'),
+            ([123, None, False, True, "foo"], b"[123"),
+        ],
+    )
     def test_serializing(self, zk_client, data, expected_prefix):
         zk_cache = ZkMemoizer(client=zk_client, path_prefix="test")
@@ -696,7 +704,6 @@ def fun2_cached():
 
 
 class TestMemoizerFromConfig:
-
     def test_null_memoizer(self):
         with config_overrides(memoizer={"type": "null"}):
             memoizer = memoizer_from_config(namespace="test")
diff --git a/tests/test_connection.py b/tests/test_connection.py
index a864968c..371a4b4a 100644
--- a/tests/test_connection.py
+++ b/tests/test_connection.py
@@ -23,12 +23,11 @@ class TestBackendConnection:
-
     def test_plain_basic_auth_fails(self, requests_mock):
-        requests_mock.get("https://foo.test/", json={
-            "api_version": "1.0.0",
-            "endpoints": [{"path": "/credentials/basic", "methods": ["GET"]}]
-        })
+        requests_mock.get(
+            "https://foo.test/",
+            json={"api_version": "1.0.0", "endpoints": [{"path": "/credentials/basic", "methods": ["GET"]}]},
+        )
         requests_mock.get("https://foo.test/credentials/basic", json={"access_token": "3nt3r"})
         con = BackendConnection(id="foo", url="https://foo.test", configured_oidc_providers=[])
         with pytest.raises(LockedAuthException):
@@ -79,13 +78,21 @@ def test_basic_auth_from_request_failure(self, requests_mock, exception):
 
     def test_plain_oidc_auth_fails(self, requests_mock):
         requests_mock.get("https://foo.test/", json={"api_version": "1.0.0"})
-        requests_mock.get("https://foo.test/credentials/oidc", json={"providers": [
-            {"id": "egi", "issuer": "https://egi.test", "title": "EGI"},
-        ]})
-        requests_mock.get("https://egi.test/.well-known/openid-configuration", json={
-            "token_endpoint": "https://egi.test/token",
-            "userinfo_endpoint": "https://egi.test/userinfo",
-        })
+        requests_mock.get(
+            "https://foo.test/credentials/oidc",
+            json={
+                "providers": [
+                    {"id": "egi", "issuer": "https://egi.test", "title": "EGI"},
+                ]
+            },
+        )
+        requests_mock.get(
+            "https://egi.test/.well-known/openid-configuration",
+            json={
+                "token_endpoint": "https://egi.test/token",
+                "userinfo_endpoint": "https://egi.test/userinfo",
+            },
+        )
         requests_mock.post("https://egi.test/token", json={"access_token": "3nt3r"})
         con = BackendConnection(id="foo", url="https://foo.test")
         with pytest.raises(LockedAuthException):
@@ -94,9 +101,14 @@ def test_plain_oidc_auth_fails(self, requests_mock):
     @pytest.mark.parametrize("backend_pid", ["egi", "aho"])
     def test_oidc_auth_from_request(self, requests_mock, backend_pid):
         requests_mock.get("https://foo.test/", json={"api_version": "1.0.0"})
-        requests_mock.get("https://foo.test/credentials/oidc", json={"providers": [
-            {"id": backend_pid, "issuer": "https://egi.test", "title": "EGI"},
-        ]})
+        requests_mock.get(
+            "https://foo.test/credentials/oidc",
+            json={
+                "providers": [
+                    {"id": backend_pid, "issuer": "https://egi.test", "title": "EGI"},
+                ]
+            },
+        )
 
         def get_me(request: requests.Request, context):
             if request.headers.get("Authorization") == f"Bearer oidc/{backend_pid}/l3tm31n":
@@ -128,9 +140,14 @@ def get_me(request: requests.Request, context):
     )
     def test_oidc_auth_from_request_failure(self, requests_mock, exception):
         requests_mock.get("https://foo.test/", json={"api_version": "1.0.0"})
-        requests_mock.get("https://foo.test/credentials/oidc", json={"providers": [
-            {"id": "egi", "issuer": "https://egi.test", "title": "EGI"},
-        ]})
+        requests_mock.get(
+            "https://foo.test/credentials/oidc",
+            json={
+                "providers": [
+                    {"id": "egi", "issuer": "https://egi.test", "title": "EGI"},
+                ]
+            },
+        )
         con = BackendConnection(id="foo", url="https://foo.test")
 
         request = flask.Request(environ={"HTTP_AUTHORIZATION": "Bearer oidc/egi/l3tm31n"})
@@ -185,7 +202,6 @@ def test_invalidate(self, requests_mock):
             con.get("/")
 
     def test_init_vs_default_timeout(self, requests_mock):
-
         def get_handler(expected_timeout: int):
             def capabilities(request, context):
                 assert request.timeout == expected_timeout
@@ -196,8 +212,7 @@ def capabilities(request, context):
         # Capabilities request during init
         m = requests_mock.get("https://foo.test/", json=get_handler(expected_timeout=5))
         con = BackendConnection(
-            id="foo", url="https://foo.test", configured_oidc_providers=[],
-            default_timeout=20, init_timeout=5
+            id="foo", url="https://foo.test", configured_oidc_providers=[], default_timeout=20, init_timeout=5
         )
         assert m.call_count == 1
@@ -207,14 +222,19 @@ def capabilities(request, context):
         assert m.call_count == 1
 
     def test_version_discovery_timeout(self, requests_mock):
-        well_known = requests_mock.get("https://foo.test/.well-known/openeo", status_code=200, json={
-            "versions": [{"api_version": "1.0.0", "url": "https://oeo.test/v1/"}, ],
-        })
+        well_known = requests_mock.get(
+            "https://foo.test/.well-known/openeo",
+            status_code=200,
+            json={
+                "versions": [
+                    {"api_version": "1.0.0", "url": "https://oeo.test/v1/"},
+                ],
+            },
+        )
         requests_mock.get("https://oeo.test/v1/", status_code=200, json={"api_version": "1.0.0"})
 
         _ = BackendConnection(
-            id="foo", url="https://foo.test", configured_oidc_providers=[],
-            default_timeout=20, init_timeout=5
+            id="foo", url="https://foo.test", configured_oidc_providers=[], default_timeout=20, init_timeout=5
         )
         assert well_known.call_count == 1
         assert well_known.request_history[-1].timeout == 5
@@ -228,11 +248,7 @@ def test_from_config(self, backend1, backend2):
         backends = MultiBackendConnection.from_config()
         assert set(c.id for c in backends.get_connections()) == {"b1", "b2"}
 
-
-    @pytest.mark.parametrize(["bid1", "bid2"], [
-        ("b1", "b1-dev"), ("b1", "b1.dev"), ("b1", "b1:dev"),
-        ("AA", "BB")
-    ])
+    @pytest.mark.parametrize(["bid1", "bid2"], [("b1", "b1-dev"), ("b1", "b1.dev"), ("b1", "b1:dev"), ("AA", "BB")])
     def test_backend_id_format_invalid(self, backend1, backend2, bid1, bid2):
         with pytest.raises(ValueError, match="should be alphanumeric only"):
             _ = MultiBackendConnection({bid1: backend1, bid2: backend2}, configured_oidc_providers=[])
@@ -356,7 +372,8 @@ def test_build_oidc_handling_basic(self, pid, issuer, title):
             configured_oidc_providers=[
                 OidcProvider(id=pid, issuer=issuer, title=title),
                 OidcProvider(id="egi-dev", issuer="https://egi-dev.test", title="EGI dev"),
-            ])
+            ],
+        )
 
         for con in multi_backend_connection:
             assert con.get_oidc_provider_map() == {pid: "egi"}
@@ -395,7 +412,8 @@ def test_build_oidc_handling_intersection(self, requests_mock, backend1, backend
                 OidcProvider("xa", "https://x.test", "A-X"),
                 OidcProvider("ya", "https://y.test", "A-Y"),
                 OidcProvider("za", "https://z.test", "A-Z"),
-            ])
+            ],
+        )
 
         assert [con.get_oidc_provider_map() for con in multi_backend_connection] == [
             {"xa": "x1", "ya": "y1"},
@@ -425,7 +443,8 @@ def test_build_oidc_handling_intersection_empty(self, requests_mock, backend1, b
             configured_oidc_providers=[
                 OidcProvider("ya", "https://y.test", "A-Y"),
                 OidcProvider("za", "https://z.test", "A-Z"),
-            ])
+            ],
+        )
 
         assert [con.get_oidc_provider_map() for con in multi_backend_connection] == [
             {},
@@ -466,42 +485,59 @@ def test_build_oidc_handling_order(self, requests_mock, backend1, backend2):
                 OidcProvider("a-a", "https://a.test", "A-A"),
                 OidcProvider("a-d", "https://d.test", "A-D"),
                 OidcProvider("a-c", "https://c.test/", "A-C"),
-            ])
+            ],
+        )
 
         assert [con.get_oidc_provider_map() for con in multi_backend_connection] == [
-            {'a-a': 'a1', 'a-b': 'b1', 'a-c': 'c1', 'a-d': 'd1', 'a-e': 'e1'},
-            {'a-a': 'a2', 'a-b': 'b2', 'a-c': 'c2', 'a-d': 'd2', 'a-e': 'e2'},
+            {"a-a": "a1", "a-b": "b1", "a-c": "c1", "a-d": "d1", "a-e": "e1"},
+            {"a-a": "a2", "a-b": "b2", "a-c": "c2", "a-d": "d2", "a-e": "e2"},
         ]
 
     def test_oidc_provider_mapping(self, requests_mock):
         domain1 = "https://b1.test/v1"
         requests_mock.get(domain1 + "/", json={"api_version": "1.0.0"})
-        requests_mock.get(domain1 + "/credentials/oidc", json={"providers": [
-            {"id": "a1", "issuer": "https://a.test/", "title": "A1"},
-            {"id": "x1", "issuer": "https://x.test/", "title": "X1"},
-            {"id": "y1", "issuer": "https://y.test/", "title": "Y1"},
-        ]})
+        requests_mock.get(
+            domain1 + "/credentials/oidc",
+            json={
+                "providers": [
+                    {"id": "a1", "issuer": "https://a.test/", "title": "A1"},
+                    {"id": "x1", "issuer": "https://x.test/", "title": "X1"},
+                    {"id": "y1", "issuer": "https://y.test/", "title": "Y1"},
+                ]
+            },
+        )
         domain2 = "https://b2.test/v1"
         requests_mock.get(domain2 + "/", json={"api_version": "1.0.0"})
-        requests_mock.get(domain2 + "/credentials/oidc", json={"providers": [
-            {"id": "b2", "issuer": "https://b.test", "title": "B2"},
-            {"id": "x2", "issuer": "https://x.test", "title": "X2"},
-            {"id": "y2", "issuer": "https://y.test", "title": "Y2"},
-        ]})
+        requests_mock.get(
+            domain2 + "/credentials/oidc",
+            json={
+                "providers": [
+                    {"id": "b2", "issuer": "https://b.test", "title": "B2"},
+                    {"id": "x2", "issuer": "https://x.test", "title": "X2"},
+                    {"id": "y2", "issuer": "https://y.test", "title": "Y2"},
+                ]
+            },
+        )
         domain3 = "https://b3.test/v1"
         requests_mock.get(domain3 + "/", json={"api_version": "1.0.0"})
-        requests_mock.get(domain3 + "/credentials/oidc", json={"providers": [
-            {"id": "c3", "issuer": "https://c.test/", "title": "C3"},
-            {"id": "x3", "issuer": "https://x.test", "title": "X3"},
-            {"id": "y3", "issuer": "https://y.test/", "title": "Y3"},
-        ]})
+        requests_mock.get(
+            domain3 + "/credentials/oidc",
+            json={
+                "providers": [
+                    {"id": "c3", "issuer": "https://c.test/", "title": "C3"},
+                    {"id": "x3", "issuer": "https://x.test", "title": "X3"},
+                    {"id": "y3", "issuer": "https://y.test/", "title": "Y3"},
+                ]
+            },
+        )
 
         multi_backend_connection = MultiBackendConnection(
             backends={"b1": domain1, "b2": domain2, "b3": domain3},
             configured_oidc_providers=[
                 OidcProvider("ax", "https://x.test", "A-X"),
                 OidcProvider("ay", "https://y.test", "A-Y"),
-            ])
+            ],
+        )
 
         def get_me(request: requests.Request, context):
             auth = request.headers.get("Authorization")
@@ -529,9 +565,14 @@ def get_me(request: requests.Request, context):
     def test_oidc_provider_mapping_changes(self, requests_mock, caplog):
         domain1 = "https://b1.test/v1"
         requests_mock.get(domain1 + "/", json={"api_version": "1.0.0"})
-        requests_mock.get(domain1 + "/credentials/oidc", json={"providers": [
-            {"id": "x1", "issuer": "https://x.test/", "title": "X1"},
-        ]})
+        requests_mock.get(
+            domain1 + "/credentials/oidc",
+            json={
+                "providers": [
+                    {"id": "x1", "issuer": "https://x.test/", "title": "X1"},
+                ]
+            },
+        )
 
         def get_me(request: requests.Request, context):
             auth = request.headers.get("Authorization")
@@ -545,8 +586,7 @@ def get_me(request: requests.Request, context):
         ]
 
         multi_backend_connection = MultiBackendConnection(
-            backends={"b1": domain1},
-            configured_oidc_providers=configured_oidc_providers
+            backends={"b1": domain1}, configured_oidc_providers=configured_oidc_providers
         )
 
         warnings = "\n".join(r.getMessage() for r in caplog.records if r.levelno >= logging.WARNING)
@@ -559,9 +599,14 @@ def get_me(request: requests.Request, context):
 
         # Change backend's oidc config, wait for connections cache to expire
         with clock_mock(offset=1000):
-            requests_mock.get(domain1 + "/credentials/oidc", json={"providers": [
-                {"id": "y1", "issuer": "https://y.test/", "title": "Y1"},
-            ]})
+            requests_mock.get(
+                domain1 + "/credentials/oidc",
+                json={
+                    "providers": [
+                        {"id": "y1", "issuer": "https://y.test/", "title": "Y1"},
+                    ]
+                },
+            )
             # Try old auth headers
             request = flask.Request(environ={"HTTP_AUTHORIZATION": "Bearer oidc/ax/yadayadayada"})
             with pytest.raises(OpenEOApiException, match="Back-end 'b1' does not support OIDC provider 'ax'"):
@@ -576,10 +621,7 @@ def get_me(request: requests.Request, context):
             assert con.get("/me").json() == {"user_id": "Bearer oidc/y1/yadayadayada"}
 
     def test_connection_invalidate(self, backend1):
-        multi_backend_connection = MultiBackendConnection(
-            backends={"b1": backend1},
-            configured_oidc_providers=[]
-        )
+        multi_backend_connection = MultiBackendConnection(backends={"b1": backend1}, configured_oidc_providers=[])
 
         con1 = multi_backend_connection.get_connection("b1")
         assert con1.get("/").json() == DictSubSet({"api_version": "1.1.0"})
@@ -594,8 +636,7 @@ def test_connection_invalidate(self, backend1):
 
     def test_get_connections(self, requests_mock, backend1, backend2):
         multi_backend_connection = MultiBackendConnection(
-            backends={"b1": backend1, "b2": backend2},
-            configured_oidc_providers=[]
+            backends={"b1": backend1, "b2": backend2}, configured_oidc_providers=[]
         )
         assert set(b.id for b in multi_backend_connection.get_connections()) == {"b1", "b2"}
diff --git a/tests/test_egi.py b/tests/test_egi.py
index 777af7d8..111d6011 100644
--- a/tests/test_egi.py
+++ b/tests/test_egi.py
@@ -9,9 +9,7 @@
 
 
 def test_parse_eduperson_entitlement():
-    assert parse_eduperson_entitlement(
-        "urn:mace:egi.eu:group:vo.openeo.cloud#aai.egi.eu"
-    ) == Entitlement(
+    assert parse_eduperson_entitlement("urn:mace:egi.eu:group:vo.openeo.cloud#aai.egi.eu") == Entitlement(
         namespace="urn:mace:egi.eu", vo="vo.openeo.cloud", group=None, role=None, authority="aai.egi.eu"
     )
     assert parse_eduperson_entitlement(
@@ -22,20 +20,29 @@ def test_parse_eduperson_entitlement():
     assert parse_eduperson_entitlement(
         "urn:mace:egi.eu:group:vo.openeo.cloud:vo.openeo-sub.cloud:role=early_adopter#aai.egi.eu"
     ) == Entitlement(
-        namespace="urn:mace:egi.eu", vo="vo.openeo.cloud", group="vo.openeo-sub.cloud", role="early_adopter",
-        authority="aai.egi.eu"
+        namespace="urn:mace:egi.eu",
+        vo="vo.openeo.cloud",
+        group="vo.openeo-sub.cloud",
+        role="early_adopter",
+        authority="aai.egi.eu",
     )
     assert parse_eduperson_entitlement(
         "urn:mace:egi-dev.eu:group:vo.openeo-dev.cloud:vo.openeo-sub.cloud:role=Early-Adop.ter#aai.egi-dev.eu"
     ) == Entitlement(
-        namespace="urn:mace:egi-dev.eu", vo="vo.openeo-dev.cloud", group="vo.openeo-sub.cloud", role="Early-Adop.ter",
-        authority="aai.egi-dev.eu"
+        namespace="urn:mace:egi-dev.eu",
+        vo="vo.openeo-dev.cloud",
+        group="vo.openeo-sub.cloud",
+        role="Early-Adop.ter",
+        authority="aai.egi-dev.eu",
     )
     assert parse_eduperson_entitlement(
         "urn:mace:egi.eu:group:openEO_test:education_package.openEO_test:admins:role=member#aai.egi.eu"
     ) == Entitlement(
-        namespace="urn:mace:egi.eu", vo="openEO_test", group="education_package.openEO_test:admins", role="member",
-        authority="aai.egi.eu"
+        namespace="urn:mace:egi.eu",
+        vo="openEO_test",
+        group="education_package.openEO_test:admins",
+        role="member",
+        authority="aai.egi.eu",
     )
@@ -53,13 +60,9 @@ class TestUserRole:
     def test_basic(self):
         role = UserRole("Foo")
         assert role.id == "Foo"
-        assert role.entitlement_match(
-            "urn:mace:egi.eu:group:vo.openeo.cloud:role=Foo#aai.egi.eu"
-        )
+        assert role.entitlement_match("urn:mace:egi.eu:group:vo.openeo.cloud:role=Foo#aai.egi.eu")
 
-    @pytest.mark.parametrize(
-        "title", ["Foo-Bar", "Foo_Bar", "FooBar", "Foo Bar", "foo bar", "foo_bar"]
-    )
+    @pytest.mark.parametrize("title", ["Foo-Bar", "Foo_Bar", "FooBar", "Foo Bar", "foo bar", "foo_bar"])
     @pytest.mark.parametrize(
         "entitlement_role",
         ["Foo-Bar", "FooBar", "Foo_Bar", "foobar", "foo-bar", "foo_bar"],
@@ -67,83 +70,42 @@ def test_basic(self):
     def test_normalization(self, title, entitlement_role):
         role = UserRole(title)
         assert role.id == "FooBar"
-        assert role.entitlement_match(
-            f"urn:mace:egi.eu:group:vo.openeo.cloud:role={entitlement_role}#aai.egi.eu"
-        )
+        assert role.entitlement_match(f"urn:mace:egi.eu:group:vo.openeo.cloud:role={entitlement_role}#aai.egi.eu")
 
     def test_is_early_adopter(self):
-        (role,) = [
-            r for r in OPENEO_PLATFORM_USER_ROLES.roles if r.id == "EarlyAdopter"
-        ]
+        (role,) = [r for r in OPENEO_PLATFORM_USER_ROLES.roles if r.id == "EarlyAdopter"]
         is_early_adopter = role.entitlement_match
-        assert is_early_adopter(
-            "urn:mace:egi.eu:group:vo.openeo.cloud:role=early_adopter#aai.egi.eu"
-        )
-        assert is_early_adopter(
-            "urn:mace:egi.eu:group:vo.openeo.cloud:role=Early_Adopter#aai.egi.eu"
-        )
-        assert is_early_adopter(
-            "urn:mace:egi.eu:group:vo.openeo.cloud:role=Early-Adopter#aai.egi.eu"
-        )
-        assert is_early_adopter(
-            "urn:mace:egi.eu:group:vo.openeo.cloud:role=EarlyAdopter#aai.egi.eu"
-        )
+        assert is_early_adopter("urn:mace:egi.eu:group:vo.openeo.cloud:role=early_adopter#aai.egi.eu")
+        assert is_early_adopter("urn:mace:egi.eu:group:vo.openeo.cloud:role=Early_Adopter#aai.egi.eu")
+        assert is_early_adopter("urn:mace:egi.eu:group:vo.openeo.cloud:role=Early-Adopter#aai.egi.eu")
+        assert is_early_adopter("urn:mace:egi.eu:group:vo.openeo.cloud:role=EarlyAdopter#aai.egi.eu")
 
         assert not is_early_adopter("urn:mace:egi.eu:group:vo.openeo.cloud#aai.egi.eu")
-        assert not is_early_adopter(
-            "urn:mace:uho.ai:group:vo.openeo.cloud:role=early_adopter#aai.egi.eu"
-        )
-        assert not is_early_adopter(
-            "urn:mace:egi.eu:group:vo.kleurenwiezen.be:role=early_adopter#aai.egi.eu"
-        )
-        assert not is_early_adopter(
-            "urn:mace:egi.eu:group:vo.openeo.cloud:role=member#aai.egi.eu"
-        )
-        assert not is_early_adopter(
-            "urn:mace:egi.eu:group:vo.openeo.cloud:role=early_adopter#ooi.egi.eu"
-        )
+        assert not is_early_adopter("urn:mace:uho.ai:group:vo.openeo.cloud:role=early_adopter#aai.egi.eu")
+        assert not is_early_adopter("urn:mace:egi.eu:group:vo.kleurenwiezen.be:role=early_adopter#aai.egi.eu")
+        assert not is_early_adopter("urn:mace:egi.eu:group:vo.openeo.cloud:role=member#aai.egi.eu")
+        assert not is_early_adopter("urn:mace:egi.eu:group:vo.openeo.cloud:role=early_adopter#ooi.egi.eu")
         assert not is_early_adopter("foobar")
         assert not is_early_adopter("")
 
-
     def test_is_30day_trial(self):
         (role,) = [r for r in OPENEO_PLATFORM_USER_ROLES.roles if r.id == "30DayTrial"]
         is_30day_trial = role.entitlement_match
-        assert is_30day_trial(
-            "urn:mace:egi.eu:group:vo.openeo.cloud:role=30day_trial#aai.egi.eu"
-        )
-        assert is_30day_trial(
-            "urn:mace:egi.eu:group:vo.openeo.cloud:role=30Day_Trial#aai.egi.eu"
-        )
-        assert is_30day_trial(
-            "urn:mace:egi.eu:group:vo.openeo.cloud:role=30Day-Trial#aai.egi.eu"
-        )
-        assert is_30day_trial(
-            "urn:mace:egi.eu:group:vo.openeo.cloud:role=30-Day-Trial#aai.egi.eu"
-        )
-        assert is_30day_trial(
-            "urn:mace:egi.eu:group:vo.openeo.cloud:role=30DayTrial#aai.egi.eu"
-        )
-
-        assert not is_30day_trial(
-            "urn:mace:uho.ai:group:vo.openeo.cloud:role=30day_trial#aai.egi.eu"
-        )
-        assert not is_30day_trial(
-            "urn:mace:egi.eu:group:vo.kleurenwiezen.be:role=30day_trial#aai.egi.eu"
-        )
-        assert not is_30day_trial(
-            "urn:mace:egi.eu:group:vo.openeo.cloud:role=30day_trial#ooi.egi.eu"
-        )
+        assert is_30day_trial("urn:mace:egi.eu:group:vo.openeo.cloud:role=30day_trial#aai.egi.eu")
+        assert is_30day_trial("urn:mace:egi.eu:group:vo.openeo.cloud:role=30Day_Trial#aai.egi.eu")
+        assert is_30day_trial("urn:mace:egi.eu:group:vo.openeo.cloud:role=30Day-Trial#aai.egi.eu")
+        assert is_30day_trial("urn:mace:egi.eu:group:vo.openeo.cloud:role=30-Day-Trial#aai.egi.eu")
+        assert is_30day_trial("urn:mace:egi.eu:group:vo.openeo.cloud:role=30DayTrial#aai.egi.eu")
+
+        assert not is_30day_trial("urn:mace:uho.ai:group:vo.openeo.cloud:role=30day_trial#aai.egi.eu")
+        assert not is_30day_trial("urn:mace:egi.eu:group:vo.kleurenwiezen.be:role=30day_trial#aai.egi.eu")
+        assert not is_30day_trial("urn:mace:egi.eu:group:vo.openeo.cloud:role=30day_trial#ooi.egi.eu")
         assert not is_30day_trial("urn:mace:egi.eu:group:vo.openeo.cloud#aai.egi.eu")
-        assert not is_30day_trial(
-            "urn:mace:egi.eu:group:vo.openeo.cloud:role=member#aai.egi.eu"
-        )
-        assert not is_30day_trial(
-            "urn:mace:egi.eu:group:vo.openeo.cloud:role=30day_trial#ooi.egi.eu"
-        )
+        assert not is_30day_trial("urn:mace:egi.eu:group:vo.openeo.cloud:role=member#aai.egi.eu")
+        assert not is_30day_trial("urn:mace:egi.eu:group:vo.openeo.cloud:role=30day_trial#ooi.egi.eu")
         assert not is_30day_trial("foobar")
         assert not is_30day_trial("")
diff --git a/tests/test_testing.py b/tests/test_testing.py
index 3d4920a1..d1ea39cc 100644
--- a/tests/test_testing.py
+++ b/tests/test_testing.py
@@ -29,11 +29,14 @@ def test_mock_clock_basic(fail):
         assert Clock.utcnow().year > 2020
 
 
-@pytest.mark.parametrize(["start", "expected_time", "expected_date"], [
-    (1000, 1000, datetime.datetime(1970, 1, 1, 0, 16, 40)),
-    ("2021-02-21", 1613865600, datetime.datetime(2021, 2, 21)),
-    ("2021-02-21T12:34:56Z", 1613910896, datetime.datetime(2021, 2, 21, 12, 34, 56)),
-])
+@pytest.mark.parametrize(
+    ["start", "expected_time", "expected_date"],
+    [
+        (1000, 1000, datetime.datetime(1970, 1, 1, 0, 16, 40)),
+        ("2021-02-21", 1613865600, datetime.datetime(2021, 2, 21)),
+        ("2021-02-21T12:34:56Z", 1613910896, datetime.datetime(2021, 2, 21, 12, 34, 56)),
+    ],
+)
 def test_mock_clock_start(start, expected_time, expected_date):
     assert Clock.time() == approx_now()
     with clock_mock(start=start):
@@ -42,7 +45,7 @@ def test_mock_clock_start(start, expected_time, expected_date):
     assert Clock.time() == approx_now()
 
 
-@pytest.mark.parametrize("step", [1, 2, .1])
+@pytest.mark.parametrize("step", [1, 2, 0.1])
 def test_clock_mock_step(step):
     with clock_mock(start=1000, step=step):
         assert Clock.time() == 1000
diff --git a/tests/test_utils.py b/tests/test_utils.py
index 77552301..cc158b73 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -20,7 +20,6 @@ class TestMultiDictGetter:
-
     def test_basic(self):
         getter = MultiDictGetter([{"a": 1, "b": 2}, {"b": 222, "c": 333}])
         assert list(getter.get("a")) == [1]
@@ -45,32 +44,36 @@ def test_available_keys(self):
         assert getter.available_keys(["a", "c", "d"]) == ["a", "c"]
 
     def test_concat(self):
-        getter = MultiDictGetter([
-            {"a": [1, 11], "b": [2, 22], "c": [33]},
-            {"b": [222, 2222], "c": (33, 3333)}
-        ])
+        getter = MultiDictGetter([{"a": [1, 11], "b": [2, 22], "c": [33]}, {"b": [222, 2222], "c": (33, 3333)}])
         assert getter.concat("a") == [1, 11]
         assert getter.concat("b") == [2, 22, 222, 2222]
         assert getter.concat("c") == [33, 33, 3333]
         assert getter.concat("c", skip_duplicates=True) == [33, 3333]
         assert getter.concat("d") == []
 
-    @pytest.mark.parametrize(["data", "expected", "expect_warning"], [
-        ([4, 5], [1, 2, 3, 4, 5, 100], False),
-        ((4, 5), [1, 2, 3, 4, 5, 100], False),
-        (45, [1, 2, 3, 100], True),
-        ("45", [1, 2, 3, 100], True),
-        ({4: "foo", 5: "bar"}, [1, 2, 3, 100], True),
-        ({"foo": 4, "bar": 5}, [1, 2, 3, 100], True),
-        (range(4, 6), [1, 2, 3, 100], True),
-        ((x for x in [4, 5]), [1, 2, 3, 100], True),
-    ])
+    @pytest.mark.parametrize(
+        ["data", "expected", "expect_warning"],
+        [
+            ([4, 5], [1, 2, 3, 4, 5, 100], False),
+            ((4, 5), [1, 2, 3, 4, 5, 100], False),
+            (45, [1, 2, 3, 100], True),
+            ("45", [1, 2, 3, 100], True),
+            ({4: "foo", 5: "bar"}, [1, 2, 3, 100], True),
+            ({"foo": 4, "bar": 5}, [1, 2, 3, 100], True),
+            (range(4, 6), [1, 2, 3, 100], True),
+            ((x for x in [4, 5]), [1, 2, 3, 100], True),
+        ],
+    )
     def test_concat_type_handling(self, data, expected, expect_warning, caplog):
-        getter = MultiDictGetter([
-            {"a": [1, 2, 3], },
-            {"a": data},
-            {"a": [100]},
-        ])
+        getter = MultiDictGetter(
+            [
+                {
+                    "a": [1, 2, 3],
+                },
+                {"a": data},
+                {"a": [100]},
+            ]
+        )
         assert getter.concat("a") == expected
 
         if expect_warning:
@@ -87,10 +90,12 @@ def test_first(self):
         assert getter.first("d", default=666) == 666
 
     def test_select(self):
-        getter = MultiDictGetter([
-            {"a": {"aa": {"aaa": 1, "aab": 2}, "ab": 3}, "b": {"ba": 4, "bb": {"bba": 5}}},
-            {"a": {"aa": {"aaa": 10, "aac": 12}, "ac": 13}, "b": {"ba": 14, "bc": {"bbc": 15}}},
-        ])
+        getter = MultiDictGetter(
+            [
+                {"a": {"aa": {"aaa": 1, "aab": 2}, "ab": 3}, "b": {"ba": 4, "bb": {"bba": 5}}},
+                {"a": {"aa": {"aaa": 10, "aac": 12}, "ac": 13}, "b": {"ba": 14, "bc": {"bbc": 15}}},
+            ]
+        )
         assert list(getter.select("a").get("aa")) == [{"aaa": 1, "aab": 2}, {"aaa": 10, "aac": 12}]
         assert list(getter.select("a").get("ab")) == [3]
         assert list(getter.select("b").get("a")) == []
@@ -133,7 +138,6 @@ def test_dict_merge():
 
 
 class TestEventHandler:
-
     def test_empty(self):
         handler = EventHandler("event")
         handler.trigger()
@@ -161,7 +165,6 @@ def test_failure(self):
 
 
 class TestBoundingBox:
-
     def test_basic(self):
         bbox = BoundingBox(1, 2, 3, 4)
         assert bbox.west == 1
@@ -269,9 +272,11 @@ def test_common_prefix_multiple():
 def test_drop_dict_keys():
     assert drop_dict_keys({}, keys=["foo"]) == {}
     assert drop_dict_keys({"foo": 2, "bar": 3}, keys=["foo"]) == {"bar": 3}
-    assert drop_dict_keys(
-        [{"foo": 2, "bar": 3}, {"baz": 5}, {"meh": 8}], keys=["foo", "baz"]
-    ) == [{"bar": 3}, {}, {"meh": 8}]
+    assert drop_dict_keys([{"foo": 2, "bar": 3}, {"baz": 5}, {"meh": 8}], keys=["foo", "baz"]) == [
+        {"bar": 3},
+        {},
+        {"meh": 8},
+    ]
     assert drop_dict_keys(
         {
             "foo": {1: 1, 2: 2, 3: 3},
diff --git a/tests/test_views.py b/tests/test_views.py
index 6395cff3..99eeecd7 100644
--- a/tests/test_views.py
+++ b/tests/test_views.py
@@ -75,7 +75,6 @@ def test_deploy_metadata(self, api100):
         capabilities = api100.get("/").assert_status_code(200).json
         assert "openeo_aggregator" in capabilities["processing:software"]
 
-
     def test_only_oidc_auth(self, api100):
         res = api100.get("/").assert_status_code(200)
         capabilities = res.json
@@ -116,7 +115,7 @@ def test_health_check_failed_backend(self, api100, requests_mock, backend1, back
     def test_health_check_invalid_backend(self, api100, requests_mock, backend1, backend2):
         requests_mock.get(backend1 + "/health", json={"health": "OK"}, headers={"Content-type": "application/json"})
-        requests_mock.get(backend2 + "/health", text='Inva{id J}0n', headers={"Content-type": "application/json"})
+        requests_mock.get(backend2 + "/health", text="Inva{id J}0n", headers={"Content-type": "application/json"})
         resp = api100.get("/health").assert_status_code(500)
         assert resp.json == {
             "backend_status": {
@@ -132,10 +131,7 @@ def test_health_check_invalid_backend(self, api100, requests_mock, backend1, bac
         }
 
 
-
-
 class TestCatalog:
-
     def test_collections_basic(self, api100, requests_mock, backend1, backend2):
         requests_mock.get(backend1 + "/collections", json={"collections": [{"id": "S1"}, {"id": "S2"}]})
         requests_mock.get(backend2 + "/collections", json={"collections": [{"id": "S3"}]})
@@ -182,13 +178,16 @@ def collection_items(request, context):
         res.assert_status_code(200)
         assert res.json == {"type": "FeatureCollection", "features": [{"type": "Feature", "geometry": "blabla"}]}
 
-    @pytest.mark.parametrize(["backend1_up", "backend2_up", "expected"], [
-        (True, False, {"S1", "S2"}),
-        (False, True, {"S3"}),
-        (False, False, set()),
-    ])
+    @pytest.mark.parametrize(
+        ["backend1_up", "backend2_up", "expected"],
+        [
+            (True, False, {"S1", "S2"}),
+            (False, True, {"S3"}),
+            (False, False, set()),
+        ],
+    )
     def test_collections_resilience(
-            self, api100, requests_mock, backend1, backend2, backend1_up, backend2_up, expected
+        self, api100, requests_mock, backend1, backend2, backend1_up, backend2_up, expected
     ):
         if backend1_up:
             requests_mock.get(backend1 + "/collections", json={"collections": [{"id": "S1"}, {"id": "S2"}]})
@@ -324,12 +323,14 @@ def test_collections_whitelist(self, api100, requests_mock, backend1, backend2,
 
 class TestAuthentication:
     def test_credentials_oidc_default(self, api100, backend1, backend2):
         res = api100.get("/credentials/oidc").assert_status_code(200).json
-        assert res == {"providers": [
-            {"id": "egi", "issuer": "https://egi.test", "title": "EGI", "scopes": ["openid"]},
-            {"id": "x-agg", "issuer": "https://x.test", "title": "X (agg)", "scopes": ["openid"]},
-            {"id": "y-agg", "issuer": "https://y.test", "title": "Y (agg)", "scopes": ["openid"]},
-            {"id": "z-agg", "issuer": "https://z.test", "title": "Z (agg)", "scopes": ["openid"]},
-        ]}
+        assert res == {
+            "providers": [
+                {"id": "egi", "issuer": "https://egi.test", "title": "EGI", "scopes": ["openid"]},
+                {"id": "x-agg", "issuer": "https://x.test", "title": "X (agg)", "scopes": ["openid"]},
+                {"id": "y-agg", "issuer": "https://y.test", "title": "Y (agg)", "scopes": ["openid"]},
+                {"id": "z-agg", "issuer": "https://z.test", "title": "Z (agg)", "scopes": ["openid"]},
+            ]
+        }
 
     def test_me_unauthorized(self, api100):
         api100.get("/me").assert_error(401, "AuthenticationRequired")
@@ -349,8 +350,7 @@ def test_basic_auth(self, api100_with_entitlement_check, caplog):
         api100_with_entitlement_check.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN)
         res = api100_with_entitlement_check.get("/me")
         res.assert_error(
-            403, "PermissionsInsufficient",
-            message="An EGI account is required for using openEO Platform."
+            403, "PermissionsInsufficient", message="An EGI account is required for using openEO Platform."
         )
         warnings = "\n".join(r.getMessage() for r in caplog.records if r.levelno == logging.WARNING)
         assert re.search(r"internal_auth_data.*authentication_method.*basic", warnings)
@@ -360,16 +360,17 @@ def get_userinfo(request: requests.Request, context):
             assert request.headers["Authorization"] == "Bearer funiculifunicula"
             return {"sub": "john"}
 
-        requests_mock.get("https://egi.test/.well-known/openid-configuration", json={
-            "userinfo_endpoint": "https://egi.test/userinfo"
-        })
+        requests_mock.get(
+            "https://egi.test/.well-known/openid-configuration", json={"userinfo_endpoint": "https://egi.test/userinfo"}
+        )
         requests_mock.get("https://egi.test/userinfo", json=get_userinfo)
 
         api100_with_entitlement_check.set_auth_bearer_token(token="oidc/egi/funiculifunicula")
         res = api100_with_entitlement_check.get("/me")
         res.assert_error(
-            403, "PermissionsInsufficient",
-            message="Proper enrollment in openEO Platform virtual organization is required."
+            403,
+            "PermissionsInsufficient",
+            message="Proper enrollment in openEO Platform virtual organization is required.",
         )
         warnings = "\n".join(r.getMessage() for r in caplog.records if r.levelno == logging.WARNING)
         assert re.search(r"KeyError.*eduperson_entitlement", warnings)
@@ -377,50 +378,50 @@ def get_userinfo(request: requests.Request, context):
     def _get_userifo_handler(self, eduperson_entitlement: List[str], bearer_token: str = "funiculifunicula"):
         def get_userinfo(request: requests.Request, context):
             assert request.headers["Authorization"] == f"Bearer {bearer_token}"
-            return {
-                "sub": "john",
-                "eduperson_entitlement": eduperson_entitlement
-            }
+            return {"sub": "john", "eduperson_entitlement": eduperson_entitlement}
 
         return get_userinfo
 
-    @pytest.mark.parametrize(["eduperson_entitlement", "warn_regex"], [
-        (
+    @pytest.mark.parametrize(
+        ["eduperson_entitlement", "warn_regex"],
+        [
+            (
                 [],
                 r"eduperson_entitlements['\": ]*\[\]",
-        ),
-        (
+            ),
+            (
                 ["urn:mace:egi.eu:group:vo.openeo.test:role=foo#test"],
                 r"eduperson_entitlements.*vo\.openeo\.test:role=foo",
-        ),
-        (
+            ),
+            (
                 ["urn:mace:egi.eu:group:vo.openeo.cloud:role=foo#aai.egi.eu"],
                 r"eduperson_entitlements.*vo\.openeo\.cloud:role=foo",
-        ),
-        (
+            ),
+            (
                 [
                     "urn:mace:egi.eu:group:vo.openeo.cloud:role=foo#test",
                     "urn:mace:egi.eu:group:vo.openeo.cloud:role=member#test",
                 ],
                 r"eduperson_entitlements.*vo\.openeo\.cloud:role=member",
-        )
-    ])
+            ),
+        ],
+    )
     def test_oidc_not_enrolled(
-            self, api100_with_entitlement_check, requests_mock, caplog, eduperson_entitlement, warn_regex
+        self, api100_with_entitlement_check, requests_mock, caplog, eduperson_entitlement, warn_regex
     ):
-        requests_mock.get("https://egi.test/.well-known/openid-configuration", json={
-            "userinfo_endpoint": "https://egi.test/userinfo"
-        })
         requests_mock.get(
-            "https://egi.test/userinfo",
-            json=self._get_userifo_handler(eduperson_entitlement=eduperson_entitlement)
+            "https://egi.test/.well-known/openid-configuration", json={"userinfo_endpoint": "https://egi.test/userinfo"}
+        )
+        requests_mock.get(
+            "https://egi.test/userinfo", json=self._get_userifo_handler(eduperson_entitlement=eduperson_entitlement)
         )
         api100_with_entitlement_check.set_auth_bearer_token(token="oidc/egi/funiculifunicula")
 
         res = api100_with_entitlement_check.get("/me")
         res.assert_error(
-            403, "PermissionsInsufficient",
-            message="Proper enrollment in openEO Platform virtual organization is required."
+            403,
+            "PermissionsInsufficient",
+            message="Proper enrollment in openEO Platform virtual organization is required.",
         )
         warnings = "\n".join(r.getMessage() for r in caplog.records if r.levelno == logging.WARNING)
         assert re.search(r"user_id.*john", warnings)
@@ -454,12 +455,11 @@ def test_oidc_not_enrolled(
         ],
     )
     def test_oidc_enrolled(self, api100_with_entitlement_check, requests_mock, eduperson_entitlement, expected_roles):
-        requests_mock.get("https://egi.test/.well-known/openid-configuration", json={
-            "userinfo_endpoint": "https://egi.test/userinfo"
-        })
         requests_mock.get(
-            "https://egi.test/userinfo",
-            json=self._get_userifo_handler(eduperson_entitlement=eduperson_entitlement)
+            "https://egi.test/.well-known/openid-configuration", json={"userinfo_endpoint": "https://egi.test/userinfo"}
+        )
+        requests_mock.get(
+            "https://egi.test/userinfo", json=self._get_userifo_handler(eduperson_entitlement=eduperson_entitlement)
         )
 
         api100_with_entitlement_check.set_auth_bearer_token(token="oidc/egi/funiculifunicula")
@@ -493,9 +493,11 @@ def test_issuer_url_normalization(self, requests_mock, backend1, backend2, white
         requests_mock.get(oidc_url_conf, json={"userinfo_endpoint": oidc_url_ui})
         requests_mock.get(
             oidc_url_ui,
-            json=self._get_userifo_handler(eduperson_entitlement=[
-                "urn:mace:egi.eu:group:vo.openeo.cloud:role=early_adopter#aai.egi.eu",
-            ])
+            json=self._get_userifo_handler(
+                eduperson_entitlement=[
+                    "urn:mace:egi.eu:group:vo.openeo.cloud:role=early_adopter#aai.egi.eu",
+                ]
+            ),
         )
         with config_overrides(
             oidc_providers=[OidcProvider(id="egi", issuer=oidc_issuer, title="EGI")],
@@ -515,7 +517,7 @@ def test_issuer_url_normalization(self, requests_mock, backend1, backend2, white
         res.assert_error(403, "PermissionsInsufficient")
         assert re.search(
             "user_access_validation failure.*oidc_issuer.*https://egi.test/bar.*issuer_whitelist.*https://egi.test/foo",
-            caplog.text
+            caplog.text,
         )
@@ -570,10 +572,10 @@ def test_processes_basic(self, api100, requests_mock, backend1, backend2):
                 ],
                 "returns": {"schema": {}},
                 "federation:backends": ["b1"],
-                'deprecated': False,
-                'experimental': False,
-                'examples': [],
-                'links': []
+                "deprecated": False,
+                "experimental": False,
+                "examples": [],
+                "links": [],
             },
             {
                 "id": "mean",
@@ -587,10 +589,10 @@ def test_processes_basic(self, api100, requests_mock, backend1, backend2):
                 ],
                 "returns": {"schema": {"type": "number"}},
                 "federation:backends": ["b1", "b2"],
-                'deprecated': False,
-                'experimental': False,
-                'examples': [],
-                'links': []
+                "deprecated": False,
+                "experimental": False,
+                "examples": [],
+                "links": [],
             },
             {
                 "id": "multiply",
@@ -601,10 +603,10 @@ def test_processes_basic(self, api100, requests_mock, backend1, backend2):
                 ],
                 "returns": {"schema": {}},
                 "federation:backends": ["b2"],
-                'deprecated': False,
-                'experimental': False,
-                'examples': [],
-                'links': []
+                "deprecated": False,
+                "experimental": False,
+                "examples": [],
+                "links": [],
             },
         ],
         "links": [],
@@ -626,23 +628,21 @@ def test_processes_basic(self, api100, requests_mock, backend1, backend2):
                         ],
                         "returns": {"schema": {}},
                         "federation:backends": ["b1"],
-                        'deprecated': False,
-                        'experimental': False,
-                        'examples': [],
-                        'links': []
+                        "deprecated": False,
+                        "experimental": False,
+                        "examples": [],
+                        "links": [],
                     },
                     {
                         "id": "mean",
                         "description": "mean",
-                        "parameters": [
-                            {"name": "data", "schema": {}, "description": "data"}
-                        ],
+                        "parameters": [{"name": "data", "schema": {}, "description": "data"}],
                         "returns": {"schema": {}},
                         "federation:backends": ["b1"],
-                        'deprecated': False,
-                        'experimental': False,
-                        'examples': [],
-                        'links': []
+                        "deprecated": False,
+                        "experimental": False,
+                        "examples": [],
+                        "links": [],
                     },
                 ],
             ),
@@ -656,10 +656,10 @@ def test_processes_basic(self, api100, requests_mock, backend1, backend2):
                         "parameters": [{"name": "data", "schema": {}, "description": "data"}],
                         "returns": {"schema": {}},
                         "federation:backends": ["b2"],
-                        'deprecated': False,
-                        'experimental': False,
-                        'examples': [],
-                        'links': []
+                        "deprecated": False,
+                        "experimental": False,
+                        "examples": [],
+                        "links": [],
                     },
                     {
                         "id": "multiply",
@@ -670,10 +670,10 @@ def test_processes_basic(self, api100, requests_mock, backend1, backend2):
                         ],
                         "returns": {"schema": {}},
                         "federation:backends": ["b2"],
-                        'deprecated': False,
-                        'experimental': False,
-                        'examples': [],
-                        'links': []
+                        "deprecated": False,
+                        "experimental": False,
+                        "examples": [],
+                        "links": [],
                     },
                 ],
             ),
@@ -733,7 +733,7 @@ def test_result_basic_math_basic_auth(self, api100, requests_mock, backend1, bac
         def post_result(request: requests.Request, context):
             assert request.headers["Authorization"] == TEST_USER_AUTH_HEADER["Authorization"]
             pg = request.json()["process"]["process_graph"]
-            (_, node), = pg.items()
+            ((_, node),) = pg.items()
             assert node["process_id"] == "add"
             assert node["result"] is True
             context.headers["Content-Type"] = "application/json"
@@ -754,15 +754,15 @@ def get_userinfo(request: requests.Request, context):
         def post_result(request: requests.Request, context):
             assert request.headers["Authorization"] == "Bearer oidc/egi/funiculifunicula"
             pg = request.json()["process"]["process_graph"]
-            (_, node), = pg.items()
+            ((_, node),) = pg.items()
             assert node["process_id"] == "add"
             assert node["result"] is True
             context.headers["Content-Type"] = "application/json"
             return node["arguments"]["x"] + node["arguments"]["y"]
 
-        requests_mock.get("https://egi.test/.well-known/openid-configuration", json={
-            "userinfo_endpoint": "https://egi.test/userinfo"
-        })
+        requests_mock.get(
+            "https://egi.test/.well-known/openid-configuration", json={"userinfo_endpoint": "https://egi.test/userinfo"}
+        )
         requests_mock.get("https://egi.test/userinfo", json=get_userinfo)
         requests_mock.post(backend1 + "/result", json=post_result)
@@ -807,15 +807,34 @@ def post_result(request: requests.Request, context):
         assert len(next(chunks)) == chunk_size
         assert len(res.data) == 1000 - 2 * chunk_size
 
-    @pytest.mark.parametrize(["cid", "call_counts"], [
-        ("S1", (1, 0)),
-        ("S10", (1, 0)),
-        ("S2", (0, 1)),
-        ("S20", (0, 1)),
-    ])
+    @pytest.mark.parametrize(
+        ["cid", "call_counts"],
+        [
+            ("S1", (1, 0)),
+            ("S10", (1, 0)),
+            ("S2", (0, 1)),
+            ("S20", (0, 1)),
+        ],
+    )
     def test_result_backend_by_collection(self, api100, requests_mock, backend1, backend2, cid, call_counts):
-        requests_mock.get(backend1 + "/collections", json={"collections": [{"id": "S1"}, {"id": "S10"}, ]})
-        requests_mock.get(backend2 + "/collections", json={"collections": [{"id": "S2"}, {"id": "S20"}, ]})
+        requests_mock.get(
+            backend1 + "/collections",
+            json={
+                "collections": [
+                    {"id": "S1"},
+                    {"id": "S10"},
+                ]
+            },
+        )
+        requests_mock.get(
+            backend2 + "/collections",
+            json={
+                "collections": [
+                    {"id": "S2"},
+                    {"id": "S20"},
+                ]
+            },
+        )
 
         def post_result(request: requests.Request, context):
             assert request.headers["Authorization"] == TEST_USER_AUTH_HEADER["Authorization"]
@@ -832,9 +851,7 @@ def post_result(request: requests.Request, context):
         assert res.json == 123
         assert (b1_mock.call_count, b2_mock.call_count) == call_counts
 
-    def test_processes_different_versions(
-        self, api100, requests_mock, backend1, backend2, mbldr
-    ):
+    def test_processes_different_versions(self, api100, requests_mock, backend1, backend2, mbldr):
         """
         This used to fail with OpenEOApiException:
         Only single version is supported, but found: {'1.2.3', '1.3.5'}
@@ -856,8 +873,24 @@ def test_processes_different_versions(
 
     def test_result_backend_by_collection_multiple_hits(self, api100, requests_mock, backend1, backend2, caplog):
         caplog.set_level(logging.WARNING)
-        requests_mock.get(backend1 + "/collections", json={"collections": [{"id": "S1"}, {"id": "S2"}, ]})
-        requests_mock.get(backend2 + "/collections", json={"collections": [{"id": "S2"}, {"id": "S3"}, ]})
+        requests_mock.get(
+            backend1 + "/collections",
+            json={
+                "collections": [
+                    {"id": "S1"},
+                    {"id": "S2"},
+                ]
+            },
+        )
+        requests_mock.get(
+            backend2 + "/collections",
+            json={
+                "collections": [
+                    {"id": "S2"},
+                    {"id": "S3"},
+                ]
+            },
+        )
 
         def post_result(request: requests.Request, context):
             assert request.headers["Authorization"] == TEST_USER_AUTH_HEADER["Authorization"]
@@ -886,12 +919,15 @@ def test_result_backend_by_collection_collection_not_found(self, api100, request
         res = api100.post("/result", json={"process": {"process_graph": pg}})
         res.assert_error(404, "CollectionNotFound", "Collection 'S3' does not exist")
 
-    @pytest.mark.parametrize("pg", [
-        {"lc": {}},
-        {"lc": {"foo": "bar"}},
-        {"lc": {"process_id": "load_collection"}},
-        {"lc": {"process_id": "load_collection", "arguments": {}}},
-    ])
+    @pytest.mark.parametrize(
+        "pg",
+        [
+            {"lc": {}},
+            {"lc": {"foo": "bar"}},
+            {"lc": {"process_id": "load_collection"}},
+            {"lc": {"process_id": "load_collection", "arguments": {}}},
+        ],
+    )
     def test_result_backend_by_collection_invalid_pg(self, api100, requests_mock, backend1, backend2, pg):
         requests_mock.get(backend1 + "/collections", json={"collections": [{"id": "S1"}]})
         requests_mock.get(backend2 + "/collections", json={"collections": [{"id": "S2"}]})
@@ -1006,14 +1042,17 @@ def b2_post_result(request: requests.Request, context):
         assert res.json == 222
         assert (b1_mock.call_count, b2_mock.call_count) == (1, 1)
 
-    @pytest.mark.parametrize(["job_id", "s2_backend", "expected_success"], [
-        ("b1-b6tch-j08", 1, True),
-        ("b2-b6tch-j08", 1, False),
-        ("b1-b6tch-j08", 2, False),
-        ("b2-b6tch-j08", 2, True),
-    ])
+    @pytest.mark.parametrize(
+        ["job_id", "s2_backend", "expected_success"],
+        [
+            ("b1-b6tch-j08", 1, True),
+            ("b2-b6tch-j08", 1, False),
+            ("b1-b6tch-j08", 2, False),
+            ("b2-b6tch-j08", 2, True),
+        ],
+    )
     def test_load_result_job_id_parsing_with_load_collection(
-            self, api100, requests_mock, backend1, backend2, job_id, s2_backend, expected_success
+        self, api100, requests_mock, backend1, backend2, job_id, s2_backend, expected_success
     ):
         """Issue #19: strip backend prefix from job_id in load_result"""
@@ -1041,16 +1080,19 @@ def post_result(request: requests.Request, context):
             api100.post("/result", json=request).assert_error(400, "BackendLookupFailure")
             assert (b1_mock.call_count, b2_mock.call_count) == (0, 0)
 
-    @pytest.mark.parametrize(["job_id", "s2_backend", "expected_success"], [
-        ("b1-b6tch-j08", 1, True),
-        ("b2-b6tch-j08", 1, False),
-        ("b1-b6tch-j08", 2, False),
-        ("b2-b6tch-j08", 2, True),
-        ("https://example.com/ml_model_metadata.json", 1, True),  # In this case it picks the first backend.
-        ("https://example.com/ml_model_metadata.json", 2, True),
-    ])
+    @pytest.mark.parametrize(
+        ["job_id", "s2_backend", "expected_success"],
+        [
+            ("b1-b6tch-j08", 1, True),
+            ("b2-b6tch-j08", 1, False),
+            ("b1-b6tch-j08", 2, False),
+            ("b2-b6tch-j08", 2, True),
+            ("https://example.com/ml_model_metadata.json", 1, True),  # In this case it picks the first backend.
+            ("https://example.com/ml_model_metadata.json", 2, True),
+        ],
+    )
     def test_load_result_job_id_parsing_with_load_ml_model(
-            self, api100, requests_mock, backend1, backend2, job_id, s2_backend, expected_success
+        self, api100, requests_mock, backend1, backend2, job_id, s2_backend, expected_success
     ):
         """Issue #70: random forest: providing training job with aggregator job id fails"""
@@ -1086,9 +1128,7 @@ def post_result(request: requests.Request, context):
             "https://external.test/bla/bla",
         ],
     )
-    def test_load_result_http_reference(
-        self, api100, requests_mock, backend1, backend2, result_id
-    ):
+    def test_load_result_http_reference(self, api100, requests_mock, backend1, backend2, result_id):
         """Support load_result with HTTP references (instead of job id)"""
 
         def b1_post_result(request: requests.Request, context):
@@ -1125,11 +1165,7 @@ def b1_post_result(request: requests.Request, context):
             (
                 "blargh",
                 (1, 0),
-                [
-                    RegexMatcher(
-                        "Multiple back-end candidates.*Naively picking first one"
-                    )
-                ],
+                [RegexMatcher("Multiple back-end candidates.*Naively picking first one")],
             ),
             ("wibble", (1, 0), []),
             ("snorfle", (0, 1), []),
             (
                 "frobnicate",
                 (1, 0),
                 [
                     RegexMatcher("Skipping unknown process 'frobnicate'"),
-                    RegexMatcher(
-                        "Multiple back-end candidates.*Naively picking first one"
-                    ),
+                    RegexMatcher("Multiple back-end candidates.*Naively picking first one"),
                 ],
             ),
         ],
     )
@@ -1170,10 +1204,7 @@ def test_result_backend_by_process(
         )
 
         def post_result(request: requests.Request, context):
-            assert (
-                request.headers["Authorization"]
-                == TEST_USER_AUTH_HEADER["Authorization"]
-            )
+            assert request.headers["Authorization"] == TEST_USER_AUTH_HEADER["Authorization"]
             assert request.json()["process"]["process_graph"] == pg
             context.headers["Content-Type"] = "application/json"
             return 123
@@ -1296,18 +1327,27 @@ def test_validation_upstream_failure(self, api100, requests_mock, backend1, back
 
 
 class TestBatchJobs:
-
     def test_list_jobs_no_auth(self, api100):
         api100.get("/jobs").assert_error(401, "AuthenticationRequired")
 
     def test_list_jobs_basic(self, api100, requests_mock, backend1, backend2):
-        requests_mock.get(backend1 + "/jobs", json={"jobs": [
-            {"id": "job03", "status": "running", "created": "2021-06-03T12:34:56Z"},
-            {"id": "job08", "status": "running", "created": "2021-06-08T12:34:56Z", "title": "Job number 8."},
-        ]})
-        requests_mock.get(backend2 + "/jobs", json={"jobs": [
-            {"id": "job05", "status": "running", "created": "2021-06-05T12:34:56Z"},
-        ]})
+        requests_mock.get(
+            backend1 + "/jobs",
+            json={
+                "jobs": [
+                    {"id": "job03", "status": "running", "created": "2021-06-03T12:34:56Z"},
+                    {"id": "job08", "status": "running", "created": "2021-06-08T12:34:56Z", "title": "Job number 8."},
+                ]
+            },
+        )
+        requests_mock.get(
+            backend2 + "/jobs",
+            json={
+                "jobs": [
+                    {"id": "job05", "status": "running", "created": "2021-06-05T12:34:56Z"},
+                ]
+            },
+        )
         api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN)
         res = api100.get("/jobs").assert_status_code(200).json
         assert res == {
@@ -1353,9 +1393,10 @@ def b2_get_jobs(request, context):
 
     @pytest.mark.parametrize("b2_oidc_pid", ["egi", 
"aho"]) def test_list_jobs_oidc_pid_mapping(self, requests_mock, backend1, backend2, b2_oidc_pid): # Override /credentials/oidc of backend2 before building flask app and ApiTester - requests_mock.get(backend2 + "/credentials/oidc", json={"providers": [ - {"id": b2_oidc_pid, "issuer": "https://egi.test", "title": "EGI"} - ]}) + requests_mock.get( + backend2 + "/credentials/oidc", + json={"providers": [{"id": b2_oidc_pid, "issuer": "https://egi.test", "title": "EGI"}]}, + ) api100 = get_api100(get_flask_app()) # OIDC setup @@ -1363,23 +1404,27 @@ def get_userinfo(request: requests.Request, context): assert request.headers["Authorization"] == "Bearer t0k3n" return {"sub": "john"} - requests_mock.get("https://egi.test/.well-known/openid-configuration", json={ - "userinfo_endpoint": "https://egi.test/userinfo" - }) + requests_mock.get( + "https://egi.test/.well-known/openid-configuration", json={"userinfo_endpoint": "https://egi.test/userinfo"} + ) requests_mock.get("https://egi.test/userinfo", json=get_userinfo) def b1_get_jobs(request, context): assert request.headers["Authorization"] == "Bearer oidc/egi/t0k3n" - return {"jobs": [ - {"id": "job03", "status": "running", "created": "2021-06-03T12:34:56Z"}, - {"id": "job08", "status": "running", "created": "2021-06-08T12:34:56Z"}, - ]} + return { + "jobs": [ + {"id": "job03", "status": "running", "created": "2021-06-03T12:34:56Z"}, + {"id": "job08", "status": "running", "created": "2021-06-08T12:34:56Z"}, + ] + } def b2_get_jobs(request, context): assert request.headers["Authorization"] == f"Bearer oidc/{b2_oidc_pid}/t0k3n" - return {"jobs": [ - {"id": "job05", "status": "running", "created": "2021-06-05T12:34:56Z"}, - ]} + return { + "jobs": [ + {"id": "job05", "status": "running", "created": "2021-06-05T12:34:56Z"}, + ] + } requests_mock.get(backend1 + "/jobs", json=b1_get_jobs) requests_mock.get(backend2 + "/jobs", json=b2_get_jobs) @@ -1394,10 +1439,15 @@ def b2_get_jobs(request, context): @pytest.mark.parametrize("status_code", [204, 303, 404, 500]) def test_list_jobs_failing_backend(self, api100, requests_mock, backend1, backend2, caplog, status_code): - requests_mock.get(backend1 + "/jobs", json={"jobs": [ - {"id": "job03", "status": "running", "created": "2021-06-03T12:34:56Z"}, - {"id": "job08", "status": "running", "created": "2021-06-08T12:34:56Z"}, - ]}) + requests_mock.get( + backend1 + "/jobs", + json={ + "jobs": [ + {"id": "job03", "status": "running", "created": "2021-06-03T12:34:56Z"}, + {"id": "job08", "status": "running", "created": "2021-06-08T12:34:56Z"}, + ] + }, + ) requests_mock.get(backend2 + "/jobs", status_code=status_code, json={"code": "nope", "message": "and nope"}) api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN) res = api100.get("/jobs").assert_status_code(200).json @@ -1414,10 +1464,15 @@ def test_list_jobs_failing_backend(self, api100, requests_mock, backend1, backen assert "Failed to get job listing from backend 'b2'" in warnings def test_list_jobs_offline_backend(self, api100, requests_mock, backend1, backend2, caplog): - requests_mock.get(backend1 + "/jobs", json={"jobs": [ - {"id": "job03", "status": "running", "created": "2021-06-03T12:34:56Z"}, - {"id": "job08", "status": "running", "created": "2021-06-08T12:34:56Z"}, - ]}) + requests_mock.get( + backend1 + "/jobs", + json={ + "jobs": [ + {"id": "job03", "status": "running", "created": "2021-06-03T12:34:56Z"}, + {"id": "job08", "status": "running", "created": "2021-06-08T12:34:56Z"}, + ] + }, + ) requests_mock.get(backend2 + "/", status_code=500, 
json={"code": "nope", "message": "completely down!"}) api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN) @@ -1495,9 +1550,7 @@ def post_jobs(request: requests.Request, context): res = api100.post("/jobs", json={"process": {"process_graph": pg}}).assert_status_code(201) assert res.headers["Location"] == "http://oeoa.test/openeo/1.0.0/jobs/b1-th3j0b" assert res.headers["OpenEO-Identifier"] == "b1-th3j0b" - assert jobs == [ - {"process": {"process_graph": pg}} - ] + assert jobs == [{"process": {"process_graph": pg}}] def test_create_job_options(self, api100, requests_mock, backend1): requests_mock.get(backend1 + "/collections", json={"collections": [{"id": "S2"}]}) @@ -1517,33 +1570,42 @@ def post_jobs(request: requests.Request, context): pg = {"lc": {"process_id": "load_collection", "arguments": {"id": "S2"}, "result": True}} api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN) - res = api100.post("/jobs", json={ - "process": {"process_graph": pg}, - "title": "da job", - "plan": "free-tier", - "job_options": {"side": "salad"}, - "something else": "whatever", - }).assert_status_code(201) + res = api100.post( + "/jobs", + json={ + "process": {"process_graph": pg}, + "title": "da job", + "plan": "free-tier", + "job_options": {"side": "salad"}, + "something else": "whatever", + }, + ).assert_status_code(201) assert res.headers["Location"] == "http://oeoa.test/openeo/1.0.0/jobs/b1-th3j0b" assert res.headers["OpenEO-Identifier"] == "b1-th3j0b" - @pytest.mark.parametrize("body", [ - {"foo": "meh"}, - {"process": "meh"}, - ]) + @pytest.mark.parametrize( + "body", + [ + {"foo": "meh"}, + {"process": "meh"}, + ], + ) def test_create_job_pg_missing(self, api100, requests_mock, backend1, body): requests_mock.get(backend1 + "/collections", json={"collections": [{"id": "S2"}]}) api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN) res = api100.post("/jobs", json=body) res.assert_error(400, "ProcessGraphMissing") - @pytest.mark.parametrize("body", [ - {"process": {"process_graph": "meh"}}, - {"process": {"process_graph": {}}}, - {"process": {"process_graph": {"foo": "meh"}}}, - {"process": {"process_graph": {"foo": {"bar": "meh"}}}}, - {"process": {"process_graph": {"foo": {"process_id": "meh"}}}}, - ]) + @pytest.mark.parametrize( + "body", + [ + {"process": {"process_graph": "meh"}}, + {"process": {"process_graph": {}}}, + {"process": {"process_graph": {"foo": "meh"}}}, + {"process": {"process_graph": {"foo": {"bar": "meh"}}}}, + {"process": {"process_graph": {"foo": {"process_id": "meh"}}}}, + ], + ) def test_create_job_pg_invalid(self, api100, requests_mock, backend1, body): requests_mock.get(backend1 + "/collections", json={"collections": [{"id": "S2"}]}) requests_mock.post( @@ -1574,15 +1636,9 @@ def post_jobs(request: requests.Request, context): ["force_backend", "expected"], [("b1", "b1"), ("b2", "b2"), (None, "b1")], ) - def test_create_job_force_backend( - self, api100, requests_mock, backend1, backend2, force_backend, expected - ): - requests_mock.get( - backend1 + "/collections", json={"collections": [{"id": "S2"}]} - ) - requests_mock.get( - backend2 + "/collections", json={"collections": [{"id": "S2"}]} - ) + def test_create_job_force_backend(self, api100, requests_mock, backend1, backend2, force_backend, expected): + requests_mock.get(backend1 + "/collections", json={"collections": [{"id": "S2"}]}) + requests_mock.get(backend2 + "/collections", json={"collections": [{"id": "S2"}]}) jobs = [] @@ -1615,10 +1671,7 @@ def post_jobs(request: requests.Request, context): 
"job_options": job_options, }, ).assert_status_code(201) - assert ( - res.headers["Location"] - == f"http://oeoa.test/openeo/1.0.0/jobs/{expected}-th3j0b" - ) + assert res.headers["Location"] == f"http://oeoa.test/openeo/1.0.0/jobs/{expected}-th3j0b" assert res.headers["OpenEO-Identifier"] == f"{expected}-th3j0b" assert jobs == [{"process": {"process_graph": pg}}] @@ -1654,28 +1707,39 @@ def post_jobs(request: requests.Request, context): assert validation_mock.call_count == 0 def test_get_job_metadata(self, api100, requests_mock, backend1): - requests_mock.get(backend1 + "/jobs/th3j0b", json={ - "id": "th3j0b", - "title": "The job", "description": "Just doing my job.", - "process": {"process_graph": { - "lc": {"process_id": "load_collection", "arguments": {"id": "S2"}, "result": True} - }}, - "status": "running", "progress": 42, "created": "2017-01-01T09:32:12Z", - "usage": { - "cpu": {"value": 1000, "unit": "cpu-seconds"}, - "memory": {"value": 2000, "unit": "mb-seconds"}, - "duration": {"value": 3000, "unit": "seconds"}, + requests_mock.get( + backend1 + "/jobs/th3j0b", + json={ + "id": "th3j0b", + "title": "The job", + "description": "Just doing my job.", + "process": { + "process_graph": { + "lc": {"process_id": "load_collection", "arguments": {"id": "S2"}, "result": True} + } + }, + "status": "running", + "progress": 42, + "created": "2017-01-01T09:32:12Z", + "usage": { + "cpu": {"value": 1000, "unit": "cpu-seconds"}, + "memory": {"value": 2000, "unit": "mb-seconds"}, + "duration": {"value": 3000, "unit": "seconds"}, + }, }, - }) + ) api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN) res = api100.get("/jobs/b1-th3j0b").assert_status_code(200) assert res.json == { "id": "b1-th3j0b", - "title": "The job", "description": "Just doing my job.", - "process": {"process_graph": { - "lc": {"process_id": "load_collection", "arguments": {"id": "S2"}, "result": True} - }}, - "status": "running", "progress": 42, "created": "2017-01-01T09:32:12Z", + "title": "The job", + "description": "Just doing my job.", + "process": { + "process_graph": {"lc": {"process_id": "load_collection", "arguments": {"id": "S2"}, "result": True}} + }, + "status": "running", + "progress": 42, + "created": "2017-01-01T09:32:12Z", "usage": { "cpu": {"value": 1000, "unit": "cpu-seconds"}, "memory": {"value": 2000, "unit": "mb-seconds"}, @@ -1687,7 +1751,8 @@ def test_get_job_metadata(self, api100, requests_mock, backend1): def test_get_job_metadata_not_found_on_backend(self, api100, requests_mock, backend1, job_id): requests_mock.get( backend1 + f"/jobs/{job_id}", - status_code=JobNotFoundException.status_code, json=JobNotFoundException(job_id=job_id).to_dict() + status_code=JobNotFoundException.status_code, + json=JobNotFoundException(job_id=job_id).to_dict(), ) api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN) res = api100.get(f"/jobs/b1-{job_id}") @@ -1700,9 +1765,14 @@ def test_get_job_metadata_not_found_on_aggregator(self, api100): def test_start_job(self, api100, requests_mock, backend1): m = requests_mock.post(backend1 + "/jobs/th3j0b/results", status_code=202) - requests_mock.get(backend1 + "/jobs/th3j0b", json={ - "id": "th3j0b", "status": "created", "created": "2017-01-01T09:32:12Z", - }) + requests_mock.get( + backend1 + "/jobs/th3j0b", + json={ + "id": "th3j0b", + "status": "created", + "created": "2017-01-01T09:32:12Z", + }, + ) api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN) api100.post("/jobs/b1-th3j0b/results").assert_status_code(202) assert m.call_count == 1 @@ -1711,7 +1781,8 @@ 
def test_start_job(self, api100, requests_mock, backend1): def test_start_job_not_found_on_backend(self, api100, requests_mock, backend1, job_id): m = requests_mock.get( backend1 + f"/jobs/{job_id}", - status_code=JobNotFoundException.status_code, json=JobNotFoundException(job_id=job_id).to_dict() + status_code=JobNotFoundException.status_code, + json=JobNotFoundException(job_id=job_id).to_dict(), ) api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN) res = api100.post(f"/jobs/b1-{job_id}/results") @@ -1733,7 +1804,8 @@ def test_cancel_job(self, api100, requests_mock, backend1): def test_cancel_job_not_found_on_backend(self, api100, requests_mock, backend1, job_id): m = requests_mock.delete( backend1 + f"/jobs/{job_id}/results", - status_code=JobNotFoundException.status_code, json=JobNotFoundException(job_id=job_id).to_dict() + status_code=JobNotFoundException.status_code, + json=JobNotFoundException(job_id=job_id).to_dict(), ) api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN) res = api100.delete(f"/jobs/b1-{job_id}/results") @@ -1755,7 +1827,8 @@ def test_delete_job(self, api100, requests_mock, backend1): def test_delete_job_not_found_on_backend(self, api100, requests_mock, backend1, job_id): m = requests_mock.delete( backend1 + f"/jobs/{job_id}", - status_code=JobNotFoundException.status_code, json=JobNotFoundException(job_id=job_id).to_dict() + status_code=JobNotFoundException.status_code, + json=JobNotFoundException(job_id=job_id).to_dict(), ) api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN) res = api100.delete(f"/jobs/b1-{job_id}") @@ -1768,21 +1841,31 @@ def test_delete_job_not_found_on_aggregator(self, api100): res.assert_error(404, "JobNotFound", message="The batch job 'nope-and-nope' does not exist.") def test_get_results(self, api100, requests_mock, backend1): - m1 = requests_mock.get(backend1 + "/jobs/th3j0b", json={ - "id": "th3j0b", - "title": "The job", "description": "Just doing my job.", - "status": "finished", "progress": 100, "created": "2017-01-01T09:32:12Z", - }) - m2 = requests_mock.get(backend1 + "/jobs/th3j0b/results", status_code=200, json={ - "assets": { - "r1.tiff": { - "href": "https//res.b1.test/123/r1.tiff", - "title": "Result 1", - "type": "image/tiff; application=geotiff", - "roles": ["data", "testing"], + m1 = requests_mock.get( + backend1 + "/jobs/th3j0b", + json={ + "id": "th3j0b", + "title": "The job", + "description": "Just doing my job.", + "status": "finished", + "progress": 100, + "created": "2017-01-01T09:32:12Z", + }, + ) + m2 = requests_mock.get( + backend1 + "/jobs/th3j0b/results", + status_code=200, + json={ + "assets": { + "r1.tiff": { + "href": "https//res.b1.test/123/r1.tiff", + "title": "Result 1", + "type": "image/tiff; application=geotiff", + "roles": ["data", "testing"], + } } - } - }) + }, + ) api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN) res = api100.get("/jobs/b1-th3j0b/results").assert_status_code(200).json assert m1.call_count == 1 @@ -1799,26 +1882,37 @@ def test_get_results(self, api100, requests_mock, backend1): assert res["type"] == "Feature" assert_dict_subset( {"title": "The job", "created": "2017-01-01T09:32:12Z", "description": "Just doing my job."}, - res["properties"] + res["properties"], ) @pytest.mark.parametrize("job_status", ["created", "running", "canceled", "error"]) def test_get_results_not_finished(self, api100, requests_mock, backend1, job_status): - requests_mock.get(backend1 + "/jobs/th3j0b", json={ - "id": "th3j0b", "status": job_status, "created": "2017-01-01T09:32:12Z", - 
}) + requests_mock.get( + backend1 + "/jobs/th3j0b", + json={ + "id": "th3j0b", + "status": job_status, + "created": "2017-01-01T09:32:12Z", + }, + ) api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN) res = api100.get("/jobs/b1-th3j0b/results") res.assert_error(JobNotFinishedException.status_code, "JobNotFinished") def test_get_results_finished_unreliable(self, api100, requests_mock, backend1): """Edge case: job status is 'finished', but results still return with 'JobNotFinished'.""" - m1 = requests_mock.get(backend1 + "/jobs/th3j0b", json={ - "id": "th3j0b", "status": "finished", "created": "2017-01-01T09:32:12Z", - }) + m1 = requests_mock.get( + backend1 + "/jobs/th3j0b", + json={ + "id": "th3j0b", + "status": "finished", + "created": "2017-01-01T09:32:12Z", + }, + ) m2 = requests_mock.get( backend1 + "/jobs/th3j0b/results", - status_code=JobNotFinishedException.status_code, json=JobNotFinishedException().to_dict() + status_code=JobNotFinishedException.status_code, + json=JobNotFinishedException().to_dict(), ) api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN) res = api100.get("/jobs/b1-th3j0b/results") @@ -1830,7 +1924,8 @@ def test_get_results_finished_unreliable(self, api100, requests_mock, backend1): def test_get_results_not_found_on_backend(self, api100, requests_mock, backend1, job_id): requests_mock.get( backend1 + f"/jobs/{job_id}", - status_code=JobNotFoundException.status_code, json=JobNotFoundException(job_id=job_id).to_dict() + status_code=JobNotFoundException.status_code, + json=JobNotFoundException(job_id=job_id).to_dict(), ) api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN) res = api100.get(f"/jobs/b1-{job_id}/results") @@ -1923,10 +2018,12 @@ def test_get_results_links(self, api100, requests_mock, backend1): def test_get_logs(self, api100, requests_mock, backend1): def get_logs(request, context): offset = request.qs.get("offset", ["_"])[0] - return {"logs": [ - {"id": offset + "1", "level": "info", "message": "hello"}, - {"id": offset + "11", "level": "info", "message": "hello"}, - ]} + return { + "logs": [ + {"id": offset + "1", "level": "info", "message": "hello"}, + {"id": offset + "11", "level": "info", "message": "hello"}, + ] + } requests_mock.get(backend1 + "/jobs/th3j0b/logs", status_code=200, json=get_logs) api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN) @@ -1936,7 +2033,7 @@ def get_logs(request, context): {"id": "_1", "level": "info", "message": "hello"}, {"id": "_11", "level": "info", "message": "hello"}, ], - "links": [] + "links": [], } res = api100.get("/jobs/b1-th3j0b/logs?offset=3").assert_status_code(200).json @@ -1945,14 +2042,15 @@ def get_logs(request, context): {"id": "31", "level": "info", "message": "hello"}, {"id": "311", "level": "info", "message": "hello"}, ], - "links": [] + "links": [], } @pytest.mark.parametrize("job_id", ["th3j0b", "th-3j-0b", "th.3j.0b", "th~3j~0b"]) def test_get_logs_not_found_on_backend(self, api100, requests_mock, backend1, job_id): requests_mock.get( backend1 + f"/jobs/{job_id}/logs", - status_code=JobNotFoundException.status_code, json=JobNotFoundException(job_id=job_id).to_dict() + status_code=JobNotFoundException.status_code, + json=JobNotFoundException(job_id=job_id).to_dict(), ) api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN) res = api100.get(f"/jobs/b1-{job_id}/logs") @@ -1985,20 +2083,23 @@ def post_jobs(request: requests.Request, context): assert res.headers["OpenEO-Identifier"] == "b1-th3j0b" assert jobs == [ - {"process": {"process_graph": { - "load": 
{"process_id": "load_result", "arguments": {"id": "b6tch-j08"}, "result": True} - }}} + { + "process": { + "process_graph": { + "load": {"process_id": "load_result", "arguments": {"id": "b6tch-j08"}, "result": True} + } + } + } ] class TestSecondaryServices: - @pytest.fixture def service_metadata_wmts_foo(self): return ServiceMetadata( id="wmts-foo", process={"process_graph": {"foo": {"process_id": "foo", "arguments": {}}}}, - url='https://oeo.net/wmts/foo', + url="https://oeo.net/wmts/foo", type="WMTS", enabled=True, configuration={"version": "0.5.8"}, @@ -2012,79 +2113,64 @@ def service_metadata_wmts_foo(self): "configuration": { "colormap": { "default": "YlGn", - "description": - "The colormap to apply to single band layers", - "type": "string" + "description": "The colormap to apply to single band layers", + "type": "string", }, "version": { "default": "1.0.0", "description": "The WMTS version to use.", "enum": ["1.0.0"], - "type": "string" - } + "type": "string", + }, }, "links": [], "process_parameters": [], - "title": "Web Map Tile Service" + "title": "Web Map Tile Service", } } def test_service_types_simple(self, api100, backend1, requests_mock, mbldr): """Given 2 backends but only 1 backend has a single service, then the aggregator - returns that 1 service's metadata. + returns that 1 service's metadata. """ # Aggregator checks if the backend supports GET /service_types, so we have to mock that up too. - requests_mock.get( - backend1 + "/", json=mbldr.capabilities(secondary_services=True) - ) + requests_mock.get(backend1 + "/", json=mbldr.capabilities(secondary_services=True)) # Only need a single service type. single_service_type = self.SERVICE_TYPES_ONLT_WMTS requests_mock.get(backend1 + "/service_types", json=single_service_type) - resp = api100.get('/service_types').assert_status_code(200) + resp = api100.get("/service_types").assert_status_code(200) assert resp.json == single_service_type - def test_service_types_multiple_backends( - self, api100, backend1, backend2, requests_mock, mbldr - ): + def test_service_types_multiple_backends(self, api100, backend1, backend2, requests_mock, mbldr): """Given 2 backends with each 1 service, then the aggregator lists both services.""" # Aggregator checks if the backend supports GET /service_types, so we have to mock that up too. 
- requests_mock.get( - backend1 + "/", json=mbldr.capabilities(secondary_services=True) - ) - requests_mock.get( - backend2 + "/", json=mbldr.capabilities(secondary_services=True) - ) + requests_mock.get(backend1 + "/", json=mbldr.capabilities(secondary_services=True)) + requests_mock.get(backend2 + "/", json=mbldr.capabilities(secondary_services=True)) service_type_1 = { "WMTS": { "configuration": { "colormap": { "default": "YlGn", - "description": - "The colormap to apply to single band layers", - "type": "string" + "description": "The colormap to apply to single band layers", + "type": "string", }, "version": { "default": "1.0.0", "description": "The WMTS version to use.", "enum": ["1.0.0"], - "type": "string" - } + "type": "string", + }, }, "links": [], "process_parameters": [], - "title": "Web Map Tile Service" + "title": "Web Map Tile Service", } } service_type_2 = { - "WMS": { - "title": "OGC Web Map Service", - "configuration": {}, - "process_parameters": [], - "links": [] - } + "WMS": {"title": "OGC Web Map Service", "configuration": {}, "process_parameters": [], "links": []} } requests_mock.get(backend1 + "/service_types", json=service_type_1) requests_mock.get(backend2 + "/service_types", json=service_type_2) @@ -2107,7 +2193,7 @@ def test_service_info(self, api100, backend1, requests_mock): "enabled": "True", "configuration": {"version": "0.5.8"}, "attributes": {}, - "title": "Test WMTS service" + "title": "Test WMTS service", } requests_mock.get(backend1 + "/services/wmts-foo", json=json_wmts_foo) api100.set_auth_bearer_token(token=TEST_USER_BEARER_TOKEN) @@ -2135,9 +2221,7 @@ def test_list_services_only_1_backend(self, api100, requests_mock, backend1, mbl """ # Aggregator checks if the backend supports GET /service_types, so we have to mock that up too. - requests_mock.get( - backend1 + "/", json=mbldr.capabilities(secondary_services=True) - ) + requests_mock.get(backend1 + "/", json=mbldr.capabilities(secondary_services=True)) requests_mock.get( backend1 + "/services", json={ @@ -2166,9 +2250,7 @@ def test_list_services_only_1_backend(self, api100, requests_mock, backend1, mbl "links": [], } - def test_list_services_no_supporting_backends( - self, api100, requests_mock, backend1, caplog - ): + def test_list_services_no_supporting_backends(self, api100, requests_mock, backend1, caplog): """None of the upstream backends supports secondary services""" caplog.set_level(logging.ERROR) api100.set_auth_bearer_token(TEST_USER_BEARER_TOKEN) @@ -2198,20 +2280,14 @@ def test_list_services_no_supporting_backends( # list of (logger_name, level, message) tuples. assert not caplog.messages - def test_list_services_basic( - self, api100, requests_mock, backend1, backend2, mbldr - ): + def test_list_services_basic(self, api100, requests_mock, backend1, backend2, mbldr): """ Given 2 backends with each 1 service, then the aggregator lists both services. """ # Aggregator checks if the backend supports GET /service_types, so we have to mock that up too. 
- requests_mock.get( - backend1 + "/", json=mbldr.capabilities(secondary_services=True) - ) - requests_mock.get( - backend2 + "/", json=mbldr.capabilities(secondary_services=True) - ) + requests_mock.get(backend1 + "/", json=mbldr.capabilities(secondary_services=True)) + requests_mock.get(backend2 + "/", json=mbldr.capabilities(secondary_services=True)) requests_mock.get( backend1 + "/services", json={ @@ -2269,20 +2345,14 @@ def test_list_services_basic( "links": [], } - def test_list_services_extended( - self, api100, requests_mock, backend1, backend2, mbldr - ): + def test_list_services_extended(self, api100, requests_mock, backend1, backend2, mbldr): """ Given multiple services across 2 backends, the aggregator lists all service types from all backends. """ # Aggregator checks if the backend supports GET /service_types, so we have to mock that up too. - requests_mock.get( - backend1 + "/", json=mbldr.capabilities(secondary_services=True) - ) - requests_mock.get( - backend2 + "/", json=mbldr.capabilities(secondary_services=True) - ) + requests_mock.get(backend1 + "/", json=mbldr.capabilities(secondary_services=True)) + requests_mock.get(backend2 + "/", json=mbldr.capabilities(secondary_services=True)) requests_mock.get( backend1 + "/services", json={ @@ -2306,9 +2376,7 @@ def test_list_services_extended( "title": "openEO", } ], - "process_graph": { - "foo": {"process_id": "foo", "arguments": {}} - }, + "process_graph": {"foo": {"process_id": "foo", "arguments": {}}}, }, "configuration": {"version": "1.3.0"}, "attributes": {"layers": ["ndvi", "evi"]}, @@ -2354,8 +2422,7 @@ def test_list_services_extended( "url": "https://example.openeo.org/wms/wms-nvdi", "configuration": {"version": "1.3.0"}, "created": "2017-01-01T09:32:12Z", - "description": "Deriving minimum NDVI measurements over pixel " - "time series of Sentinel 2", + "description": "Deriving minimum NDVI measurements over pixel time series of Sentinel 2", "enabled": True, "title": "NDVI based on Sentinel 2", }, @@ -2384,11 +2451,9 @@ def test_create_wmts(self, api100, requests_mock, backend1, mbldr): api100.set_auth_bearer_token(TEST_USER_BEARER_TOKEN) # Aggregator checks if the backend supports GET /service_types, so we have to mock that up too. - requests_mock.get( - backend1 + "/", json=mbldr.capabilities(secondary_services=True) - ) + requests_mock.get(backend1 + "/", json=mbldr.capabilities(secondary_services=True)) - backend_service_id = 'c63d6c27-c4c2-4160-b7bd-9e32f582daec' + backend_service_id = "c63d6c27-c4c2-4160-b7bd-9e32f582daec" expected_agg_id = f"b1-{backend_service_id}" # The aggregator MUST NOT point to the backend instance but to its own endpoint.
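The assertions that follow pin down the federation convention: the service id a backend returns is exposed to clients with the backend's prefix ("b1-..."), and the Location header is rewritten to the aggregator's own URL. A minimal sketch of that mapping, assuming the same http://oeoa.test/openeo/1.0.0 root the batch-job tests use (helper names are illustrative, not the aggregator's real API):

    def to_aggregator_id(backend_id: str, backend_service_id: str) -> str:
        # service "c63d6c27-..." created on backend b1 is exposed as "b1-c63d6c27-..."
        return f"{backend_id}-{backend_service_id}"

    def rewrite_location(agg_root: str, backend_id: str, backend_service_id: str) -> str:
        # clients must keep talking to the aggregator, never directly to the upstream backend
        return f"{agg_root}/services/{to_aggregator_id(backend_id, backend_service_id)}"

    assert rewrite_location(
        "http://oeoa.test/openeo/1.0.0", "b1", "c63d6c27-c4c2-4160-b7bd-9e32f582daec"
    ) == "http://oeoa.test/openeo/1.0.0/services/b1-c63d6c27-c4c2-4160-b7bd-9e32f582daec"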
@@ -2399,25 +2464,19 @@ def test_create_wmts(self, api100, requests_mock, backend1, mbldr): process_graph = {"foo": {"process_id": "foo", "arguments": {}}} post_data = { - "type": 'WMTS', - "process": { - "process_graph": process_graph, - "id": "filter_temporal_wmts" - }, + "type": "WMTS", + "process": {"process_graph": process_graph, "id": "filter_temporal_wmts"}, "title": "My Service", - "description": "Service description" + "description": "Service description", } requests_mock.post( backend1 + "/services", - headers={ - "OpenEO-Identifier": backend_service_id, - "Location": location_backend_1 - }, - status_code=201 + headers={"OpenEO-Identifier": backend_service_id, "Location": location_backend_1}, + status_code=201, ) requests_mock.get(backend1 + "/service_types", json=self.SERVICE_TYPES_ONLT_WMTS) - resp = api100.post('/services', json=post_data).assert_status_code(201) + resp = api100.post("/services", json=post_data).assert_status_code(201) assert resp.headers["OpenEO-Identifier"] == expected_agg_id assert resp.headers["Location"] == expected_location @@ -2431,22 +2490,16 @@ def test_create_wmts_reports_400_client_error(self, api100, requests_mock, backe api100.set_auth_bearer_token(TEST_USER_BEARER_TOKEN) process_graph = {"foo": {"process_id": "foo", "arguments": {}}} post_data = { - "type": 'WMTS', - "process": { - "process_graph": process_graph, - "id": "filter_temporal_wmts" - }, + "type": "WMTS", + "process": {"process_graph": process_graph, "id": "filter_temporal_wmts"}, "title": "My Service", - "description": "Service description" + "description": "Service description", } # TODO: In theory we should make the backend report an HTTP 400 status and then the aggregator # should also report HTTP 400. But in fact that comes back as HTTP 500. - requests_mock.post( - backend1 + "/services", - exc=exception_class("Testing exception handling") - ) + requests_mock.post(backend1 + "/services", exc=exception_class("Testing exception handling")) - resp = api100.post('/services', json=post_data) + resp = api100.post("/services", json=post_data) assert resp.status_code == 400 # OpenEoApiError, OpenEoRestError: more general errors we can expect to lead to an HTTP 500 server error. @@ -2464,23 +2517,18 @@ def test_create_wmts_reports_500_server_error(self, api100, requests_mock, backe api100.set_auth_bearer_token(TEST_USER_BEARER_TOKEN) # Aggregator checks if the backend supports GET /service_types, so we have to mock that up too.
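The remove/update tests further down address services through the same prefixed ids (b1-wmts-foo), and the batch-job tests use the identical convention for job ids (b1-th3j0b; see the Issue #19 docstring about stripping the prefix in load_result). A minimal sketch of the inverse operation, assuming the text before the first dash always names the backend (the helper name is hypothetical):

    from typing import Tuple

    def split_federated_id(aggregator_id: str) -> Tuple[str, str]:
        # "b1-wmts-foo" -> ("b1", "wmts-foo"): the prefix selects the upstream
        # backend, the remainder is the backend-local id that gets proxied.
        backend_id, _, remote_id = aggregator_id.partition("-")
        if not backend_id or not remote_id:
            raise ValueError(f"invalid federated id: {aggregator_id!r}")
        return backend_id, remote_id

    assert split_federated_id("b1-wmts-foo") == ("b1", "wmts-foo")
    assert split_federated_id("b1-th3j0b") == ("b1", "th3j0b")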
- requests_mock.get( - backend1 + "/", json=mbldr.capabilities(secondary_services=True) - ) + requests_mock.get(backend1 + "/", json=mbldr.capabilities(secondary_services=True)) process_graph = {"foo": {"process_id": "foo", "arguments": {}}} post_data = { - "type": 'WMTS', - "process": { - "process_graph": process_graph, - "id": "filter_temporal_wmts" - }, + "type": "WMTS", + "process": {"process_graph": process_graph, "id": "filter_temporal_wmts"}, "title": "My Service", - "description": "Service description" + "description": "Service description", } requests_mock.post(backend1 + "/services", exc=exception_factory("Testing exception handling")) requests_mock.get(backend1 + "/service_types", json=self.SERVICE_TYPES_ONLT_WMTS) - resp = api100.post('/services', json=post_data) + resp = api100.post("/services", json=post_data) assert resp.status_code == 500 def test_remove_service_succeeds(self, api100, requests_mock, backend1): @@ -2525,9 +2573,7 @@ def test_remove_service_backend_response_is_an_error_status( api100.set_auth_bearer_token(TEST_USER_BEARER_TOKEN) requests_mock.get( - backend1 + "/services/wmts-foo", - json=service_metadata_wmts_foo.prepare_for_json(), - status_code=200 + backend1 + "/services/wmts-foo", json=service_metadata_wmts_foo.prepare_for_json(), status_code=200 ) mock_delete = requests_mock.delete( backend1 + "/services/wmts-foo", @@ -2536,8 +2582,8 @@ def test_remove_service_backend_response_is_an_error_status( "id": "936DA01F-9ABD-4D9D-80C7-02AF85C822A8", "code": "ErrorRemovingService", "message": "Service 'wmts-foo' could not be removed.", - "url": "https://example.openeo.org/docs/errors/SampleError" - } + "url": "https://example.openeo.org/docs/errors/SampleError", + }, ) resp = api100.delete("/services/b1-wmts-foo") @@ -2547,9 +2593,7 @@ def test_remove_service_backend_response_is_an_error_status( # so we can reasonably assume that is where the error came from. 
assert mock_delete.called - def test_update_service_service_succeeds( - self, api100, backend1, requests_mock, service_metadata_wmts_foo - ): + def test_update_service_service_succeeds(self, api100, backend1, requests_mock, service_metadata_wmts_foo): """When it receives an existing service ID and a correct payload, it updates the expected service.""" api100.set_auth_bearer_token(TEST_USER_BEARER_TOKEN) @@ -2576,16 +2620,14 @@ def test_update_service_but_backend_id_not_found(self, api100): assert resp.status_code == 404 - def test_update_service_service_id_not_found( - self, api100, backend1, requests_mock, service_metadata_wmts_foo - ): + def test_update_service_service_id_not_found(self, api100, backend1, requests_mock, service_metadata_wmts_foo): """When the service ID does not exist for the specified backend, then the aggregator responds with HTTP 404, not found.""" api100.set_auth_bearer_token(TEST_USER_BEARER_TOKEN) mock_patch = requests_mock.patch( backend1 + "/services/servicedoesnotexist", json=service_metadata_wmts_foo.prepare_for_json(), - status_code=404 + status_code=404, ) process_graph = {"bar": {"process_id": "bar", "arguments": {"new_arg": "somevalue"}}} json_payload = {"process": {"process_graph": process_graph}} @@ -2610,8 +2652,8 @@ def test_update_service_backend_response_is_an_error_status( "id": "936DA01F-9ABD-4D9D-80C7-02AF85C822A8", "code": "ErrorUpdatingService", "message": "Service 'wmts-foo' could not be updated.", - "url": "https://example.openeo.org/docs/errors/SampleError" - } + "url": "https://example.openeo.org/docs/errors/SampleError", + }, ) process_graph = {"bar": {"process_id": "bar", "arguments": {"new_arg": "somevalue"}}} json_payload = {"process": {"process_graph": process_graph}} @@ -2737,7 +2779,6 @@ def test_delete_existing(self, api100, requests_mock, backend1): class TestResilience: - @pytest.fixture def broken_backend2(self, requests_mock) -> Tuple[str, "requests_mock.adapter._Matcher"]: """Fixture to quickly set up a config with broken backend2""" @@ -2776,9 +2817,10 @@ def test_startup_during_backend_downtime_and_recover(self, backend1, broken_back # Backend 2 is up again, but cached is still active requests_mock.get(backend2 + "/", json={"api_version": "1.0.0"}) - requests_mock.get(backend2 + "/credentials/oidc", json={"providers": [ - {"id": "egi", "issuer": "https://egi.test", "title": "EGI"} - ]}) + requests_mock.get( + backend2 + "/credentials/oidc", + json={"providers": [{"id": "egi", "issuer": "https://egi.test", "title": "EGI"}]}, + ) requests_mock.get(backend2 + "/health", text="ok again") assert api100.get("/health").assert_status_code(200).json["backend_status"] == { "b1": {"status_code": 200, "text": "OK", "response_time": pytest.approx(0.1, abs=0.1)}, @@ -2802,41 +2844,36 @@ def get_userinfo(request: requests.Request, context): assert request.headers["Authorization"] == "Bearer t0k3n" return {"sub": "john"} - requests_mock.get("https://egi.test/.well-known/openid-configuration", json={ - "userinfo_endpoint": "https://egi.test/userinfo" - }) + requests_mock.get( + "https://egi.test/.well-known/openid-configuration", json={"userinfo_endpoint": "https://egi.test/userinfo"} + ) requests_mock.get("https://egi.test/userinfo", json=get_userinfo) # Job listings: backend1 works, backend2 is down - requests_mock.get(backend1 + "/jobs", json={"jobs": [ - {"id": "j0b1", "status": "running", "created": "2021-01-11T11:11:11Z"} - ]}) + requests_mock.get( + backend1 + "/jobs", json={"jobs": [{"id": "j0b1", "status": "running", "created": 
"2021-01-11T11:11:11Z"}]} + ) requests_mock.get(backend2 + "/jobs", status_code=500, text="nope") api100.set_auth_bearer_token(token="oidc/egi/t0k3n") jobs = api100.get("/jobs").assert_status_code(200).json - assert jobs["jobs"] == [ - {"id": "b1-j0b1", "status": "running", "created": "2021-01-11T11:11:11Z"} - ] + assert jobs["jobs"] == [{"id": "b1-j0b1", "status": "running", "created": "2021-01-11T11:11:11Z"}] # Backend2 is up again (but still cached as down) requests_mock.get(backend2 + "/", json={"api_version": "1.0.0"}) - requests_mock.get(backend2 + "/credentials/oidc", json={"providers": [ - {"id": b2_oidc_provider_id, "issuer": "https://egi.test", "title": "EGI"} - ]}) + requests_mock.get( + backend2 + "/credentials/oidc", + json={"providers": [{"id": b2_oidc_provider_id, "issuer": "https://egi.test", "title": "EGI"}]}, + ) def get_jobs(request, context): assert request.headers["Authorization"] == f"Bearer oidc/{b2_oidc_provider_id}/t0k3n" - return {"jobs": [ - {"id": "j0b2", "status": "running", "created": "2021-02-22T22:22:22Z"} - ]} + return {"jobs": [{"id": "j0b2", "status": "running", "created": "2021-02-22T22:22:22Z"}]} requests_mock.get(backend2 + "/jobs", json=get_jobs) jobs = api100.get("/jobs").assert_status_code(200).json - assert jobs["jobs"] == [ - {"id": "b1-j0b1", "status": "running", "created": "2021-01-11T11:11:11Z"} - ] + assert jobs["jobs"] == [{"id": "b1-j0b1", "status": "running", "created": "2021-01-11T11:11:11Z"}] # Skip time so that connection cache is cleared with clock_mock(offset=1000):