From 9f9282deb432b9a3dda91467dcdbbf1cd2f06dd3 Mon Sep 17 00:00:00 2001 From: Jenifer Tabita Ciuciu-Kiss Date: Sun, 27 Oct 2024 02:38:16 +0100 Subject: [PATCH] fix https issues --- ontologytimemachine/custom_proxy.py | 48 +++++- ontologytimemachine/proxy_wrapper.py | 13 +- ontologytimemachine/utils/proxy_logic.py | 23 +-- poetry.lock | 103 ++++++++++- pyproject.toml | 1 + tests/archivo_test_IRIs.tsv | 14 +- tests/non_archivo_test_IRIs.tsv | 5 +- ...r.py => test_proxy_auth_header_archivo.py} | 23 ++- tests/test_proxy_auth_header_non_archivo.py | 162 ++++++++++++++++++ 9 files changed, 354 insertions(+), 38 deletions(-) rename tests/{test_proxy_auth_header.py => test_proxy_auth_header_archivo.py} (92%) create mode 100644 tests/test_proxy_auth_header_non_archivo.py diff --git a/ontologytimemachine/custom_proxy.py b/ontologytimemachine/custom_proxy.py index a685474..17e6893 100644 --- a/ontologytimemachine/custom_proxy.py +++ b/ontologytimemachine/custom_proxy.py @@ -45,6 +45,17 @@ def before_upstream_connection(self, request: HttpParser) -> HttpParser | None: logger.info("Before upstcream connection hook") logger.info(f"Request method: {request.method} - Request host: {request.host} - Request path: {request.path} - Request headers: {request.headers}") wrapped_request = HttpRequestWrapper(request) + + try: + self.client.request_host = wrapped_request.get_request_host() + except: + logger.info('No host') + + try: + self.client.request_path = wrapped_request.get_request_path() + except: + logger.info('No path') + if (self.config.clientConfigViaProxyAuth == ClientConfigViaProxyAuth.REQUIRED or self.config.clientConfigViaProxyAuth == ClientConfigViaProxyAuth.OPTIONAL): logger.info('Setting up config from auth') @@ -83,12 +94,20 @@ def before_upstream_connection(self, request: HttpParser) -> HttpParser | None: else: logger.info("CONNECT request was blocked due to the configuration") return None - - response = get_response_from_request(wrapped_request, config) - if 
response.status_code: - logger.info(response.status_code) - self.queue_response(response) - return None + + if not wrapped_request.is_connect_request(): + logger.info('Skip for the connect request') + if not wrapped_request.get_request_host(): + if hasattr(self.client, "request_host"): + wrapped_request.set_request_host(self.client.request_host) + if not wrapped_request.get_request_path(): + if hasattr(self.client, "request_path"): + wrapped_request.set_request_path(self.client.request_path) + response = get_response_from_request(wrapped_request, config) + if response.status_code: + logger.info('Queue response from proxy logic') + self.queue_response(response) + return None return request @@ -117,6 +136,12 @@ def do_intercept(self, _request: HttpParser) -> bool: # this should actually be not triggered as the CONNECT request should have been blocked before return False elif config.httpsInterception == HttpsInterception.ARCHIVO: + if not wrapped_request.get_request_host(): + if hasattr(self.client, "request_host"): + wrapped_request.set_request_host(self.client.request_host) + if not wrapped_request.get_request_path(): + if hasattr(self.client, "request_path"): + wrapped_request.set_request_path(self.client.request_path) try: if is_archivo_ontology_request(wrapped_request): logger.info("Intercepting HTTPS request since it is an Archivo ontology request") @@ -135,6 +160,7 @@ def handle_client_request(self, request: HttpParser) -> HttpParser: wrapped_request = HttpRequestWrapper(request) if (wrapped_request.is_head_request() or wrapped_request.is_get_request()) and hasattr(self.client, "mark_connect"): + logger.info('HEAD or GET and has mark_connect') if self.client.mark_connect: if hasattr(self.client, "config"): logger.info("Using the configuration from the Auth") @@ -142,12 +168,20 @@ def handle_client_request(self, request: HttpParser) -> HttpParser: else: logger.info("Using the proxy configuration") config = self.config + if not wrapped_request.get_request_host(): 
+ if hasattr(self.client, "request_host"): + wrapped_request.set_request_host(self.client.request_host) + if not wrapped_request.get_request_path(): + if hasattr(self.client, "request_path"): + wrapped_request.set_request_path(self.client.request_path) + response = get_response_from_request(wrapped_request, config) - if response.status_code: + if response and response.status_code: logger.info(response.status_code) self.queue_response(response) return None + logger.info('Return original request') return request def handle_upstream_chunk(self, chunk: memoryview): diff --git a/ontologytimemachine/proxy_wrapper.py b/ontologytimemachine/proxy_wrapper.py index cb3c8e0..4892067 100644 --- a/ontologytimemachine/proxy_wrapper.py +++ b/ontologytimemachine/proxy_wrapper.py @@ -36,6 +36,10 @@ def get_request_path(self) -> str: @abstractmethod def set_request_path(self, new_path) -> None: pass + + @abstractmethod + def set_request_host(self, new_host) -> None: + pass @abstractmethod def get_request_headers(self) -> Dict[str, str]: @@ -77,7 +81,10 @@ def is_https_request(self) -> bool: ).startswith(b"https") def get_request_host(self) -> str: - return self.request.host.decode("utf-8") + if self.request.host: + return self.request.host.decode("utf-8") + else: + return "" def get_request_path(self) -> str: if self.request.path: @@ -88,6 +95,10 @@ def get_request_path(self) -> str: def set_request_path(self, new_path: str) -> None: self.request.path = new_path.encode("utf-8") logger.info(f"Request path set to: {new_path}") + + def set_request_host(self, new_host: str) -> None: + self.request.host = new_host.encode("utf-8") + logger.info(f"Request host set to: {new_host}") def get_request_headers(self) -> Dict[str, str]: headers: Dict[str, str] = {} diff --git a/ontologytimemachine/utils/proxy_logic.py b/ontologytimemachine/utils/proxy_logic.py index af22f4b..adca8ef 100644 --- a/ontologytimemachine/utils/proxy_logic.py +++ b/ontologytimemachine/utils/proxy_logic.py @@ -144,20 
+144,15 @@ def is_archivo_ontology_request(wrapped_request): return False -def request_ontology( - wrapped_request, url, headers, disableRemovingRedirects=False, timeout=5 -): +def request_ontology(wrapped_request, url, headers, disableRemovingRedirects=False, timeout=3): allow_redirects = not disableRemovingRedirects + logger.info(f'Request parameters: url - {url}, headers - {headers}, allow_redirects - {allow_redirects}') try: if wrapped_request.is_head_request(): - response = requests.head(url=url, headers=headers, allow_redirects=allow_redirects, timeout=3) - logger.info(response.content) - logger.info(response.status_code) + response = requests.head(url=url, headers=headers, allow_redirects=allow_redirects, timeout=timeout) else: - response = requests.get(url=url, headers=headers, allow_redirects=allow_redirects, timeout=3) - logger.info(response.content) - logger.info(response.status_code) - logger.info("Successfully fetched ontology") + response = requests.get(url=url, headers=headers, allow_redirects=allow_redirects, timeout=timeout) + logger.info(f"Successfully fetched ontology - status_code: {response.status_code}") return response except Exception as e: logger.error(f"Error fetching original ontology: {e}") @@ -255,9 +250,15 @@ def fetch_latest_archived(wrapped_request, headers): ontology, _, _ = wrapped_request.get_request_url_host_path() dbpedia_url = f"{archivo_api}?o={ontology}&f={format}" logger.info(f"Fetching from DBpedia Archivo API: {dbpedia_url}") + response = request_ontology(wrapped_request, dbpedia_url, headers) + if response.status_code != 500: + return response + ontology = ontology.replace('http://', 'https://') + logger.info(f'HTTPS ontology: {ontology}') + dbpedia_url = f"{archivo_api}?o={ontology}&f={format}" + logger.info(f"Fetching from DBpedia Archivo API - https: {dbpedia_url}") return request_ontology(wrapped_request, dbpedia_url, headers) - def fetch_timestamp_archived(wrapped_request, headers, config): if not 
is_archivo_ontology_request(wrapped_request): logger.info( diff --git a/poetry.lock b/poetry.lock index 1e34442..3cc9624 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,5 +1,27 @@ # This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +[[package]] +name = "anyio" +version = "4.6.2.post1" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.9" +files = [ + {file = "anyio-4.6.2.post1-py3-none-any.whl", hash = "sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d"}, + {file = "anyio-4.6.2.post1.tar.gz", hash = "sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} + +[package.extras] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"] +trio = ["trio (>=0.26.1)"] + [[package]] name = "certifi" version = "2024.8.30" @@ -135,6 +157,63 @@ files = [ [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httpcore" +version = "1.0.6" +description = "A minimal low-level HTTP client." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.6-py3-none-any.whl", hash = "sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f"}, + {file = "httpcore-1.0.6.tar.gz", hash = "sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<1.0)"] + +[[package]] +name = "httpx" +version = "0.27.2" +description = "The next generation HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] + [[package]] name = "idna" version = "3.10" @@ -389,6 +468,17 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + [[package]] name = "tomli" version = "2.0.1" @@ -400,6 +490,17 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = 
"sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + [[package]] name = "urllib3" version = "2.2.3" @@ -437,4 +538,4 @@ watchdog = ["watchdog (>=2.3)"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "9efdbca22e8f7d122208d160253c194f4f3d177e77a011491bbaac34fac5c237" +content-hash = "cab2d2ab30a2cf718e53410b78e1849c324c58b9c69b23df45615f5ed538b829" diff --git a/pyproject.toml b/pyproject.toml index ebce3c9..8bda85e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,6 +14,7 @@ proxy-py = "^2.4.4" rdflib = "^7.0.0" werkzeug = "^3.0.4" schedule = "^1.2.2" +httpx = "^0.27.2" [build-system] diff --git a/tests/archivo_test_IRIs.tsv b/tests/archivo_test_IRIs.tsv index 17c7908..f883074 100644 --- a/tests/archivo_test_IRIs.tsv +++ b/tests/archivo_test_IRIs.tsv @@ -12,11 +12,11 @@ enable_testcase iri error_dimension expected_error iri_type comment 1 http://xmlns.com/foaf/0.1/Person None term 1 http://dbpedia.org/ontology/ None term 1 http://dbpedia.org/ontology/Person None term -0 https://bag2.basisregistraties.overheid.nl/bag/def/ http-code 404 slash -0 https://bag2.basisregistraties.overheid.nl/bag/def/Gebruiksdoel http-code 404 term -0 https://id.parliament.uk/schema http-code 404 slash slash onto without trailing slash / -0 https://id.parliament.uk/schema/Approval http-code 404 term slash onto without trailing slash / -0 https://bmake.th-brandenburg.de/spv# http-code 403 hash -0 https://bmake.th-brandenburg.de/spv http-code 403 hash just test whether Archivo API is 
used correctly -0 https://w3id.org/ttla/ transport cert-expired hash +1 https://bag2.basisregistraties.overheid.nl/bag/def/ http-code 404 slash +1 https://bag2.basisregistraties.overheid.nl/bag/def/Gebruiksdoel http-code 404 term +1 https://id.parliament.uk/schema http-code 404 slash slash onto without trailing slash / +1 https://id.parliament.uk/schema/Approval http-code 404 term slash onto without trailing slash / +1 https://bmake.th-brandenburg.de/spv# http-code 403 hash +1 https://bmake.th-brandenburg.de/spv http-code 403 hash just test whether Archivo API is used correctly +1 https://w3id.org/ttla/ transport cert-expired hash 1 http://data-gov.tw.rpi.edu/2009/data-gov-twc.rdf transport connection-refused hash diff --git a/tests/non_archivo_test_IRIs.tsv b/tests/non_archivo_test_IRIs.tsv index 627c6bf..e05a40e 100644 --- a/tests/non_archivo_test_IRIs.tsv +++ b/tests/non_archivo_test_IRIs.tsv @@ -3,10 +3,9 @@ enable_testcase iri error_dimension expected_error iri_type comment 0 https://www.w3.org/1999/02/22-rdf-syntax-ns# None https is not ID 0 http://example.org None 0 https://example.org None -0 http://1.1.1.1 None +1 http://1.1.1.1 None 0 https://1.1.1.1 None 0 https://data.globalchange.gov/gcis.owl http-code 403 https is not ID 0 https://data.ordnancesurvey.co.uk/ontology/geometry/ http-code 404 https is not ID 0 https://data.ordnancesurvey.co.uk/ontology/ http-code 301 https is not ID -0 https://google.com None -0 \ No newline at end of file +0 https://google.com None \ No newline at end of file diff --git a/tests/test_proxy_auth_header.py b/tests/test_proxy_auth_header_archivo.py similarity index 92% rename from tests/test_proxy_auth_header.py rename to tests/test_proxy_auth_header_archivo.py index 21ee6e6..844dcfc 100644 --- a/tests/test_proxy_auth_header.py +++ b/tests/test_proxy_auth_header_archivo.py @@ -8,6 +8,7 @@ from requests.auth import _basic_auth_str from requests.exceptions import SSLError from ontologytimemachine.custom_proxy import IP, PORT 
+from urllib.parse import quote # Proxy settings PROXY = f"0.0.0.0:8894" @@ -16,6 +17,7 @@ PROXIES = {"http": HTTP_PROXY, "https": HTTPS_PROXY} CA_CERT_PATH = "ca-cert.pem" + logging.basicConfig( level=logging.ERROR, format="%(asctime)s - %(levelname)s - %(message)s", @@ -75,14 +77,20 @@ def make_request_with_proxy(iri: str, mode: str) -> Tuple[int, str]: """Make a request to the IRI using the proxy.""" username = f"--ontoVersion {mode}" password = "my_password" + # Encode credentials to handle special characters + username_encoded = quote(username) + password_encoded = quote(password) + proxies = { + "http": f"http://{username_encoded}:{password_encoded}@{PROXY}", + "https": f"http://{username_encoded}:{password_encoded}@{PROXY}" + } headers = { "Accept": "text/turtle", "Accept-Encoding": "identity", - "Proxy-Authorization": _basic_auth_str(username, password) } try: # There is an issue here for https requests - response = requests.get(iri, proxies=PROXIES, verify=CA_CERT_PATH, headers=headers, timeout=10) + response = requests.get(iri, proxies=proxies, verify=CA_CERT_PATH, headers=headers, timeout=10) return response except SSLError as e: mock_response = Mock() @@ -131,8 +139,6 @@ def test_proxy_responses(test_case): # Make direct and proxy requests direct_response = make_request_without_proxy(iri) proxy_original_response = make_request_with_proxy(iri, 'original') - proxy_failover_response = make_request_with_proxy(iri, 'originalFailoverLiveLatest') - proxy_archivo_laest_response = make_request_with_proxy(iri, 'latestArchived') # Evaluation based on error_dimension if error_dimension == 'http-code': @@ -168,16 +174,17 @@ def test_proxy_responses(test_case): assert direct_response.status_code == 'connection-refused-error' assert proxy_original_response.status_code == 'connection-refused-error' + + proxy_failover_response = make_request_with_proxy(iri, 'originalFailoverLiveLatest') + proxy_archivo_latest_response = make_request_with_proxy(iri, 'latestArchived') + 
assert 200 == proxy_failover_response.status_code - assert 200 == proxy_archivo_laest_response.status_code + assert 200 == proxy_archivo_latest_response.status_code else: assert True - if __name__ == "__main__": - # You can call pytest from within the script pytest.main([__file__]) - \ No newline at end of file diff --git a/tests/test_proxy_auth_header_non_archivo.py b/tests/test_proxy_auth_header_non_archivo.py new file mode 100644 index 0000000..b878b5c --- /dev/null +++ b/tests/test_proxy_auth_header_non_archivo.py @@ -0,0 +1,162 @@ +import pytest +import requests +import logging +import csv +from typing import List, Tuple +from unittest.mock import Mock +from requests.auth import HTTPBasicAuth +from requests.auth import _basic_auth_str +from requests.exceptions import SSLError +from ontologytimemachine.custom_proxy import IP, PORT +from urllib.parse import quote + +# Proxy settings +PROXY = f"0.0.0.0:8894" +HTTP_PROXY = f"http://{PROXY}" +HTTPS_PROXY = f"http://{PROXY}" +PROXIES = {"http": HTTP_PROXY, "https": HTTPS_PROXY} +CA_CERT_PATH = "ca-cert.pem" + + +logging.basicConfig( + level=logging.ERROR, + format="%(asctime)s - %(levelname)s - %(message)s", +) +logger = logging.getLogger(__name__) + +# Load data from the TSV file dynamically +def load_test_data(file_path): + with open(file_path, 'r') as tsv_file: + reader = csv.DictReader(tsv_file, delimiter='\t') + return [row for row in reader] + + +def create_fake_response(status_code='error'): + fake_response = requests.models.Response() + fake_response.status_code = status_code # Assign the status code you want to simulate + fake_response._content = b'{"error": "This is a simulated error"}' # Set some fake content + return fake_response + + +def make_request_without_proxy(iri: str) -> Tuple[int, str]: + """Make a direct request to the IRI without using the proxy.""" + try: + response = requests.get(iri, timeout=10, allow_redirects=True) + return response + except SSLError as e: + mock_response = Mock() + 
mock_response.status_code = 'ssl-error' + return mock_response + except requests.exceptions.Timeout: + mock_response = Mock() + mock_response.status_code = 'timeout-error' + return mock_response + except requests.exceptions.ConnectionError as e: + if 'NameResolutionError' in str(e): + mock_response = Mock() + mock_response.status_code = 'nxdomain-error' + return mock_response + elif 'Connection refused' in str(e) or 'Errno 111' in str(e): + mock_response = Mock() + mock_response.status_code = 'connection-refused-error' + return mock_response + else: + mock_response = Mock() + mock_response.status_code = 'error' + return mock_response + except Exception as e: + mock_response = Mock() + mock_response.status_code = 'error' + return mock_response + +def make_request_with_proxy(iri: str, mode: str) -> Tuple[int, str]: + logger.info('Run') + """Make a request to the IRI using the proxy.""" + username = f"--ontoVersion {mode}" + password = "my_password" + # Encode credentials to handle special characters + username_encoded = quote(username) + password_encoded = quote(password) + proxies = { + "http": f"http://{username_encoded}:{password_encoded}@{PROXY}", + "https": f"http://{username_encoded}:{password_encoded}@{PROXY}" + } + headers = { + "Accept-Encoding": "identity", + } + try: + # There is an issue here for https requests + response = requests.get(iri, proxies=proxies, verify=CA_CERT_PATH, headers=headers, timeout=10) + return response + except SSLError as e: + mock_response = Mock() + mock_response.content = '' + mock_response.status_code = 'ssl-error' + return mock_response + except requests.exceptions.Timeout: + mock_response = Mock() + mock_response.content = '' + mock_response.status_code = 'timeout-error' + return mock_response + except requests.exceptions.ConnectionError as e: + if 'NXDOMAIN' in str(e): + mock_response = Mock() + mock_response.content = '' + mock_response.status_code = 'nxdomain-error' + return mock_response + elif 'Connection refused' in 
str(e) or 'Errno 111' in str(e): + mock_response = Mock() + mock_response.content = '' + mock_response.status_code = 'connection-refused-error' + return mock_response + else: + mock_response = Mock() + mock_response.content = '' + mock_response.status_code = 'error' + return mock_response + except Exception as e: + mock_response = Mock() + mock_response.content = '' + mock_response.status_code = 'error' + return mock_response + + +# Parametrize the test cases with data loaded from the TSV file +@pytest.mark.parametrize("test_case", load_test_data('tests/non_archivo_test_IRIs.tsv')) +def test_proxy_responses(test_case): + enabled = test_case['enable_testcase'] + + iri = test_case['iri'] + error_dimension = test_case['error_dimension'] + expected_error = test_case['expected_error'] + iri_type = test_case['iri_type'] + comment = test_case['comment'] + + if enabled == '1': + # Make direct and proxy requests + direct_response = make_request_without_proxy(iri) + proxy_original_response = make_request_with_proxy(iri, 'original') + #proxy_failover_response = make_request_with_proxy(iri, 'originalFailoverLiveLatest') + #proxy_archivo_latest_response = make_request_with_proxy(iri, 'latestArchived') + + + # Evaluation based on error_dimension + if error_dimension == 'http-code': + assert int(expected_error) == direct_response.status_code + assert int(expected_error) == proxy_original_response.status_code + #assert int(expected_error) == proxy_failover_response.status_code + #assert int(expected_error) == proxy_archivo_latest_response.status_code + + elif error_dimension == 'None': + assert direct_response.status_code == 200 + assert proxy_original_response.status_code == 200 + #assert proxy_failover_response.status_code == 200 + #assert proxy_archivo_latest_response.status_code == 200 + + else: + assert True + + +if __name__ == "__main__": + pytest.main([__file__]) +