diff --git a/.circleci/integration-tests/astronomer_migration_dag.py b/.circleci/integration-tests/astronomer_migration_dag.py
index 064739c88..016637067 100644
--- a/.circleci/integration-tests/astronomer_migration_dag.py
+++ b/.circleci/integration-tests/astronomer_migration_dag.py
@@ -1,4 +1,5 @@
 """Astronomer migration DAG to transform metadata from source deployment to target Astro Cloud deployment."""
+
 from datetime import datetime
 
 from airflow import DAG
diff --git a/.circleci/integration-tests/master_dag.py b/.circleci/integration-tests/master_dag.py
index eb2eb6aab..445ca265a 100644
--- a/.circleci/integration-tests/master_dag.py
+++ b/.circleci/integration-tests/master_dag.py
@@ -1,4 +1,5 @@
 """Master Dag to run all the example dags."""
+
 import logging
 import os
 import time
diff --git a/.github/scripts/get_latest_runtime_image_tag.py b/.github/scripts/get_latest_runtime_image_tag.py
index 74ad191a1..3ca879c63 100644
--- a/.github/scripts/get_latest_runtime_image_tag.py
+++ b/.github/scripts/get_latest_runtime_image_tag.py
@@ -1,4 +1,5 @@
 """This script fetches the latest runtime image tag from the provided Quay.io repository URL."""
+
 from __future__ import annotations
 
 import sys
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index ff0e16ac9..ff47c1266 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -44,7 +44,7 @@ repos:
         args: ["--config=mlc-config.json"]
 
   - repo: https://github.com/psf/black
-    rev: 23.12.1
+    rev: 24.1.1
     hooks:
       - id: black
 
@@ -90,7 +90,7 @@ repos:
         additional_dependencies: ['toml']
 
   - repo: https://github.com/PyCQA/bandit
-    rev: 1.7.6
+    rev: 1.7.7
    hooks:
      - id: bandit
        # Excluding Example DAGs temporarily until they can be fixed
diff --git a/astronomer/providers/amazon/aws/example_dags/example_aws_nuke.py b/astronomer/providers/amazon/aws/example_dags/example_aws_nuke.py
index 0bb3601ea..9e58e31fd 100644
--- a/astronomer/providers/amazon/aws/example_dags/example_aws_nuke.py
+++ b/astronomer/providers/amazon/aws/example_dags/example_aws_nuke.py
@@ -1,4 +1,5 @@
 """DAG to nuke AWS resources."""
+
 import logging
 import os
 from datetime import datetime, timedelta
diff --git a/astronomer/providers/amazon/aws/example_dags/example_batch.py b/astronomer/providers/amazon/aws/example_dags/example_batch.py
index 1776ea791..5bfa4c382 100644
--- a/astronomer/providers/amazon/aws/example_dags/example_batch.py
+++ b/astronomer/providers/amazon/aws/example_dags/example_batch.py
@@ -1,4 +1,5 @@
 """Example DAG for the AWS Batch Operator Async"""
+
 import logging
 import os
 import time
diff --git a/astronomer/providers/amazon/aws/example_dags/example_redshift_cluster_management.py b/astronomer/providers/amazon/aws/example_dags/example_redshift_cluster_management.py
index f6506d159..5ed666a92 100644
--- a/astronomer/providers/amazon/aws/example_dags/example_redshift_cluster_management.py
+++ b/astronomer/providers/amazon/aws/example_dags/example_redshift_cluster_management.py
@@ -1,4 +1,5 @@
 """Airflow operators example to manage AWS Redshift cluster."""
+
 import os
 from datetime import datetime, timedelta
 from typing import Any
diff --git a/astronomer/providers/amazon/aws/hooks/base_aws.py b/astronomer/providers/amazon/aws/hooks/base_aws.py
index aa38b0ec3..7ecedacec 100644
--- a/astronomer/providers/amazon/aws/hooks/base_aws.py
+++ b/astronomer/providers/amazon/aws/hooks/base_aws.py
@@ -1,4 +1,5 @@
 """This module contains async AWS Base Hook for deferrable operators and sensors."""
+
 from __future__ import annotations
 
 from typing import Any
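Note: nearly every hunk in this patch is mechanical fallout from the `black` bump above — the 24.x stable style puts exactly one blank line between a module docstring and the first statement. A minimal sketch of the layout the hook now enforces (hypothetical module, standard-library imports only):

```python
"""A module docstring: Black 24.x inserts the blank line that follows it."""

# Under Black 23.x the first import could sit directly beneath the docstring;
# bumping the pre-commit rev reflows every module in the repo to this shape.
import logging
from datetime import datetime

logging.getLogger(__name__).info("module loaded at %s", datetime.now())
```

Re-running `pre-commit run black --all-files` after editing `rev` regenerates this entire class of hunks in one pass.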
diff --git a/astronomer/providers/amazon/aws/operators/batch.py b/astronomer/providers/amazon/aws/operators/batch.py
index 5370ab75e..1eda818ea 100644
--- a/astronomer/providers/amazon/aws/operators/batch.py
+++ b/astronomer/providers/amazon/aws/operators/batch.py
@@ -7,6 +7,7 @@
 - `Batch `_
 - `Welcome `_
 """
+
 from __future__ import annotations
 
 import warnings
diff --git a/astronomer/providers/apache/hive/example_dags/example_hive.py b/astronomer/providers/apache/hive/example_dags/example_hive.py
index 6e4e18052..1e91b4e0d 100644
--- a/astronomer/providers/apache/hive/example_dags/example_hive.py
+++ b/astronomer/providers/apache/hive/example_dags/example_hive.py
@@ -1,4 +1,5 @@
 """This is an example dag for hive partition sensors."""
+
 import logging
 import os
 import time
diff --git a/astronomer/providers/apache/hive/hooks/hive.py b/astronomer/providers/apache/hive/hooks/hive.py
index 11d0bc3fd..2eccbee59 100644
--- a/astronomer/providers/apache/hive/hooks/hive.py
+++ b/astronomer/providers/apache/hive/hooks/hive.py
@@ -1,4 +1,5 @@
 """This module contains the Apache HiveCli hook async."""
+
 import asyncio
 from typing import Tuple
 
diff --git a/astronomer/providers/apache/livy/example_dags/example_livy.py b/astronomer/providers/apache/livy/example_dags/example_livy.py
index e32aae1af..f2cc834d1 100644
--- a/astronomer/providers/apache/livy/example_dags/example_livy.py
+++ b/astronomer/providers/apache/livy/example_dags/example_livy.py
@@ -3,6 +3,7 @@
 The tasks below trigger the computation of pi on the Spark instance using the Java and Python executables provided
 in the example library.
 """
+
 import logging
 import os
 import time
diff --git a/astronomer/providers/apache/livy/hooks/livy.py b/astronomer/providers/apache/livy/hooks/livy.py
index cbe2e2a4b..b3d55c504 100644
--- a/astronomer/providers/apache/livy/hooks/livy.py
+++ b/astronomer/providers/apache/livy/hooks/livy.py
@@ -1,4 +1,5 @@
 """This module contains the Apache Livy hook async."""
+
 import asyncio
 import re
 import warnings
diff --git a/astronomer/providers/apache/livy/operators/livy.py b/astronomer/providers/apache/livy/operators/livy.py
index 94d9c0eea..a2b8a0dad 100644
--- a/astronomer/providers/apache/livy/operators/livy.py
+++ b/astronomer/providers/apache/livy/operators/livy.py
@@ -1,4 +1,5 @@
 """This module contains the Apache Livy operator async."""
+
 from __future__ import annotations
 
 import warnings
diff --git a/astronomer/providers/apache/livy/triggers/livy.py b/astronomer/providers/apache/livy/triggers/livy.py
index 7d48cc6aa..610c37c97 100644
--- a/astronomer/providers/apache/livy/triggers/livy.py
+++ b/astronomer/providers/apache/livy/triggers/livy.py
@@ -1,4 +1,5 @@
 """This module contains the Apache Livy Trigger."""
+
 import asyncio
 import warnings
 from typing import Any, AsyncIterator, Dict, Optional, Tuple, Union
diff --git a/astronomer/providers/cncf/kubernetes/example_dags/example_kubernetes_pod_operator.py b/astronomer/providers/cncf/kubernetes/example_dags/example_kubernetes_pod_operator.py
index 3344a5660..cafb204d8 100644
--- a/astronomer/providers/cncf/kubernetes/example_dags/example_kubernetes_pod_operator.py
+++ b/astronomer/providers/cncf/kubernetes/example_dags/example_kubernetes_pod_operator.py
@@ -1,4 +1,5 @@
 """Example DAG demonstrating the usage of the KubernetesPodOperatorAsync."""
+
 import os
 from datetime import datetime, timedelta
 
diff --git a/astronomer/providers/google/cloud/example_dags/example_bigquery_impersonation_chain.py b/astronomer/providers/google/cloud/example_dags/example_bigquery_impersonation_chain.py
index 600c0db99..7770b8d59 100644
--- a/astronomer/providers/google/cloud/example_dags/example_bigquery_impersonation_chain.py
+++ b/astronomer/providers/google/cloud/example_dags/example_bigquery_impersonation_chain.py
@@ -1,4 +1,5 @@
 """Example Airflow DAG which uses impersonation parameters for authenticating with Google BigQuery service."""
+
 import os
 from datetime import datetime, timedelta
 from typing import Any
diff --git a/astronomer/providers/google/cloud/example_dags/example_bigquery_queries.py b/astronomer/providers/google/cloud/example_dags/example_bigquery_queries.py
index 0bbba4234..e2139e62a 100644
--- a/astronomer/providers/google/cloud/example_dags/example_bigquery_queries.py
+++ b/astronomer/providers/google/cloud/example_dags/example_bigquery_queries.py
@@ -2,6 +2,7 @@
 Example Airflow DAG for Google BigQuery service.
 Uses Async version of BigQueryInsertJobOperator and BigQueryCheckOperator.
 """
+
 import os
 from datetime import datetime, timedelta
 from typing import Any
diff --git a/astronomer/providers/google/cloud/example_dags/example_bigquery_sensors.py b/astronomer/providers/google/cloud/example_dags/example_bigquery_sensors.py
index 65a0661e2..77d4b7823 100644
--- a/astronomer/providers/google/cloud/example_dags/example_bigquery_sensors.py
+++ b/astronomer/providers/google/cloud/example_dags/example_bigquery_sensors.py
@@ -1,4 +1,5 @@
 """Example Airflow DAG for Google BigQuery Sensors."""
+
 import os
 from datetime import datetime, timedelta
 from typing import Any
diff --git a/astronomer/providers/google/cloud/example_dags/example_gcp_nuke.py b/astronomer/providers/google/cloud/example_dags/example_gcp_nuke.py
index b7ec46004..509698f87 100644
--- a/astronomer/providers/google/cloud/example_dags/example_gcp_nuke.py
+++ b/astronomer/providers/google/cloud/example_dags/example_gcp_nuke.py
@@ -1,4 +1,5 @@
 """DAG to delete stale GCP resources."""
+
 import os
 from datetime import timedelta
 
diff --git a/astronomer/providers/google/cloud/example_dags/example_gcs.py b/astronomer/providers/google/cloud/example_dags/example_gcs.py
index 212592630..eafb034a7 100644
--- a/astronomer/providers/google/cloud/example_dags/example_gcs.py
+++ b/astronomer/providers/google/cloud/example_dags/example_gcs.py
@@ -1,4 +1,5 @@
 """Example Airflow DAG for Google Cloud Storage operators."""
+
 import os
 from datetime import datetime, timedelta
 
diff --git a/astronomer/providers/google/cloud/gke_utils.py b/astronomer/providers/google/cloud/gke_utils.py
index 2ae5018e6..9f66bf136 100644
--- a/astronomer/providers/google/cloud/gke_utils.py
+++ b/astronomer/providers/google/cloud/gke_utils.py
@@ -9,7 +9,7 @@
 from airflow.exceptions import AirflowException
 from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
 from airflow.utils.process_utils import execute_in_subprocess, patch_environ
-from google.auth import impersonated_credentials  # type: ignore[attr-defined]
+from google.auth import impersonated_credentials
 from google.cloud.container_v1 import ClusterManagerClient
 from google.oauth2.service_account import Credentials
 from kubernetes_asyncio.config.kube_config import KubeConfigLoader
@@ -76,9 +76,11 @@ def _get_gke_config_file(
 
     # Write config to a temp file and set the environment variable to point to it.
     # This is to avoid race conditions of reading/writing a single file
-    with tempfile.NamedTemporaryFile() as conf_file, patch_environ(
-        {KUBE_CONFIG_ENV_VAR: conf_file.name}
-    ), hook.provide_authorized_gcloud():
+    with (
+        tempfile.NamedTemporaryFile() as conf_file,
+        patch_environ({KUBE_CONFIG_ENV_VAR: conf_file.name}),
+        hook.provide_authorized_gcloud(),
+    ):
         # Attempt to get/update credentials
         # We call gcloud directly instead of using google-cloud-python api
         # because there is no way to write kubernetes config to a file, which is
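Note: the `gke_utils.py` hunk is the one behavioral trap in this patch. `GoogleBaseHook.provide_authorized_gcloud()` is a context manager, so it must stay inside the `with` header; called as a bare statement it would only build and discard the generator without configuring gcloud credentials. A sketch of the pattern in isolation (the parenthesized `with` form assumes Python 3.10+; `write_kube_config` is a hypothetical stand-in for the `_get_gke_config_file` helper in the hunk above):

```python
import tempfile

from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
from airflow.utils.process_utils import patch_environ

KUBE_CONFIG_ENV_VAR = "KUBECONFIG"


def write_kube_config(hook: GoogleBaseHook) -> None:
    # All three context managers must be active at the same time: the temp
    # file receives the kubeconfig, patch_environ points KUBECONFIG at it,
    # and provide_authorized_gcloud sets up gcloud auth for the subprocess.
    with (
        tempfile.NamedTemporaryFile() as conf_file,
        patch_environ({KUBE_CONFIG_ENV_VAR: conf_file.name}),
        hook.provide_authorized_gcloud(),
    ):
        ...  # run `gcloud container clusters get-credentials` here
```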
diff --git a/astronomer/providers/google/cloud/hooks/gcs.py b/astronomer/providers/google/cloud/hooks/gcs.py
index 8a2bc848e..c60fbf1bc 100644
--- a/astronomer/providers/google/cloud/hooks/gcs.py
+++ b/astronomer/providers/google/cloud/hooks/gcs.py
@@ -1,4 +1,5 @@
 """This module contains a Google Cloud Storage hook."""
+
 import warnings
 from typing import Any, cast
 
diff --git a/astronomer/providers/google/cloud/operators/bigquery.py b/astronomer/providers/google/cloud/operators/bigquery.py
index 0d9a130de..3d07efc2e 100644
--- a/astronomer/providers/google/cloud/operators/bigquery.py
+++ b/astronomer/providers/google/cloud/operators/bigquery.py
@@ -1,4 +1,5 @@
 """This module contains Google BigQueryAsync providers."""
+
 from __future__ import annotations
 
 import warnings
diff --git a/astronomer/providers/google/cloud/operators/dataproc.py b/astronomer/providers/google/cloud/operators/dataproc.py
index fd455aa4b..dff462020 100644
--- a/astronomer/providers/google/cloud/operators/dataproc.py
+++ b/astronomer/providers/google/cloud/operators/dataproc.py
@@ -1,4 +1,5 @@
 """This module contains Google Dataproc operators."""
+
 from __future__ import annotations
 
 import warnings
diff --git a/astronomer/providers/google/cloud/operators/kubernetes_engine.py b/astronomer/providers/google/cloud/operators/kubernetes_engine.py
index b500d4993..904e87e8a 100644
--- a/astronomer/providers/google/cloud/operators/kubernetes_engine.py
+++ b/astronomer/providers/google/cloud/operators/kubernetes_engine.py
@@ -1,4 +1,5 @@
 """This module contains Google GKE operators."""
+
 from __future__ import annotations
 
 from typing import Any, Sequence
diff --git a/astronomer/providers/google/cloud/sensors/bigquery.py b/astronomer/providers/google/cloud/sensors/bigquery.py
index e4c27eb35..030857b15 100644
--- a/astronomer/providers/google/cloud/sensors/bigquery.py
+++ b/astronomer/providers/google/cloud/sensors/bigquery.py
@@ -1,4 +1,5 @@
 """This module contains Google Big Query sensors."""
+
 from __future__ import annotations
 
 import warnings
diff --git a/astronomer/providers/google/cloud/sensors/gcs.py b/astronomer/providers/google/cloud/sensors/gcs.py
index 5f9925014..0465e2de7 100644
--- a/astronomer/providers/google/cloud/sensors/gcs.py
+++ b/astronomer/providers/google/cloud/sensors/gcs.py
@@ -1,4 +1,5 @@
 """This module contains Google Cloud Storage sensors."""
+
 from __future__ import annotations
 
 import warnings
diff --git a/astronomer/providers/microsoft/azure/hooks/data_factory.py b/astronomer/providers/microsoft/azure/hooks/data_factory.py
index 64622015b..f5354cea1 100644
--- a/astronomer/providers/microsoft/azure/hooks/data_factory.py
+++ b/astronomer/providers/microsoft/azure/hooks/data_factory.py
@@ -1,4 +1,5 @@
 """This module contains the Azure Data Factory hook's asynchronous implementation."""
+
 from __future__ import annotations
 
 import inspect
diff --git a/astronomer/providers/microsoft/azure/hooks/wasb.py b/astronomer/providers/microsoft/azure/hooks/wasb.py
index b50dd610e..d837d47dd 100644
--- a/astronomer/providers/microsoft/azure/hooks/wasb.py
+++ b/astronomer/providers/microsoft/azure/hooks/wasb.py
@@ -1,4 +1,5 @@
 """This module contains the Azure WASB hook's asynchronous implementation."""
+
 from __future__ import annotations
 
 import warnings
diff --git a/dev/integration_test_scripts/replace_dependencies.py b/dev/integration_test_scripts/replace_dependencies.py
index eb9dfbbab..dfd4716fa 100644
--- a/dev/integration_test_scripts/replace_dependencies.py
+++ b/dev/integration_test_scripts/replace_dependencies.py
@@ -65,9 +65,11 @@ def parse_providers_release_testing_gh_issue(issue_url: str) -> list[str]:
     h2_titles = first_comment.find_all("h2")
     package_urls = [_parse_pypi_url_from_h2_title(h2_title) for h2_title in h2_titles]
     pinned_packages = [
-        _parse_pinned_package_from_pypi_url(url)
-        if "apache-airflow-providers-cncf-kubernetes" not in url
-        else _parse_pinned_pacakge_from_cncf(url)
+        (
+            _parse_pinned_package_from_pypi_url(url)
+            if "apache-airflow-providers-cncf-kubernetes" not in url
+            else _parse_pinned_pacakge_from_cncf(url)
+        )
         for url in package_urls
     ]
     return pinned_packages
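Note: the `replace_dependencies.py` hunk shows the other notable Black 24 style change in this patch — a multiline conditional expression inside a comprehension is now wrapped in its own parentheses. The same rewrite on a self-contained toy example (hypothetical helpers, not from this repo):

```python
def normalize(name: str) -> str:
    return name.strip().lower()


def transliterate(name: str) -> str:
    # Drop non-ASCII characters; a stand-in for a real transliteration step.
    return name.encode("ascii", "ignore").decode().lower()


raw_names = ["Alice ", "Zoë"]
# Black 23.x split the bare ternary across lines; Black 24.x adds the
# enclosing parentheses around it, exactly as in the hunk above.
labels = [
    (normalize(name) if name.isascii() else transliterate(name))
    for name in raw_names
]
print(labels)  # ['alice', 'zo']
```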
diff --git a/tests/http/hooks/test_http.py b/tests/http/hooks/test_http.py
index dae4d4364..a783cc543 100644
--- a/tests/http/hooks/test_http.py
+++ b/tests/http/hooks/test_http.py
@@ -11,28 +11,24 @@ class TestHttpHookAsync:
     @pytest.mark.asyncio
-    async def test_do_api_call_async_non_retryable_error(self, aioresponse):
+    async def test_do_api_call_async_non_retryable_error(self, aioresponse, monkeypatch):
         hook = HttpHookAsync(method="GET")
         aioresponse.get("http://httpbin.org/non_existent_endpoint", status=400)
 
-        with pytest.raises(AirflowException) as exc, mock.patch.dict(
-            "os.environ",
-            AIRFLOW_CONN_HTTP_DEFAULT="http://httpbin.org/",
-        ):
+        monkeypatch.setenv("AIRFLOW_CONN_HTTP_DEFAULT", "http://httpbin.org/")
+        with pytest.raises(AirflowException) as exc:
             await hook.run(endpoint="non_existent_endpoint")
 
         assert str(exc.value) == "400:Bad Request"
 
     @pytest.mark.asyncio
-    async def test_do_api_call_async_retryable_error(self, caplog, aioresponse):
+    async def test_do_api_call_async_retryable_error(self, caplog, aioresponse, monkeypatch):
         caplog.set_level(logging.WARNING, logger="astronomer.providers.http.hooks.http")
         hook = HttpHookAsync(method="GET")
         aioresponse.get("http://httpbin.org/non_existent_endpoint", status=500, repeat=True)
 
-        with pytest.raises(AirflowException) as exc, mock.patch.dict(
-            "os.environ",
-            AIRFLOW_CONN_HTTP_DEFAULT="http://httpbin.org/",
-        ):
+        monkeypatch.setenv("AIRFLOW_CONN_HTTP_DEFAULT", "http://httpbin.org/")
+        with pytest.raises(AirflowException) as exc:
             await hook.run(endpoint="non_existent_endpoint")
 
         assert str(exc.value) == "500:Internal Server Error"
 
     @pytest.mark.asyncio
@@ -72,7 +68,8 @@ async def test_post_request(self, aioresponse):
         )
 
         with mock.patch(
-            "airflow.hooks.base.BaseHook.get_connection", side_effect=self.get_airflow_connection
+            "airflow.hooks.base.BaseHook.get_connection",
+            side_effect=self.get_airflow_connection,
         ):
             resp = await hook.run("v1/test")
             assert resp.status == 200
@@ -90,7 +87,8 @@ async def test_post_request_and_get_json_without_keep_response(self, aioresponse):
         )
 
         with mock.patch(
-            "airflow.hooks.base.BaseHook.get_connection", side_effect=self.get_airflow_connection
+            "airflow.hooks.base.BaseHook.get_connection",
+            side_effect=self.get_airflow_connection,
         ):
             resp = await hook.run("v1/test")
             with pytest.raises(ClientConnectionError, match="Connection closed"):
@@ -109,7 +107,8 @@ async def test_post_request_and_get_json_with_keep_response(self, aioresponse):
         )
 
         with mock.patch(
-            "airflow.hooks.base.BaseHook.get_connection", side_effect=self.get_airflow_connection
+            "airflow.hooks.base.BaseHook.get_connection",
+            side_effect=self.get_airflow_connection,
         ):
             resp = await hook.run("v1/test")
             resp_payload = await resp.json()
@@ -128,7 +127,8 @@ async def test_post_request_with_error_code(self, aioresponse):
         )
 
         with mock.patch(
-            "airflow.hooks.base.BaseHook.get_connection", side_effect=self.get_airflow_connection
+            "airflow.hooks.base.BaseHook.get_connection",
+            side_effect=self.get_airflow_connection,
         ):
             with pytest.raises(AirflowException):
                 await hook.run("v1/test")
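Note: the `test_http.py` changes swap `mock.patch.dict("os.environ", ...)` stacked inside the `with` statement for pytest's built-in `monkeypatch` fixture, which restores the environment automatically at teardown and leaves `pytest.raises` as the only context manager. The pattern relies on Airflow resolving a connection id from an `AIRFLOW_CONN_<CONN_ID>` environment variable; a minimal standalone sketch (the function under test is hypothetical):

```python
import os

import pytest


def http_base_url() -> str:
    # Airflow builds the connection "http_default" from this variable, so
    # tests can inject connections without touching a metadata database.
    return os.environ["AIRFLOW_CONN_HTTP_DEFAULT"]


def test_base_url_comes_from_env(monkeypatch: pytest.MonkeyPatch) -> None:
    monkeypatch.setenv("AIRFLOW_CONN_HTTP_DEFAULT", "http://httpbin.org/")
    assert http_base_url() == "http://httpbin.org/"
    # monkeypatch undoes setenv when the test finishes, unlike a bare
    # os.environ assignment.
```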
diff --git a/tests/snowflake/hooks/test_snowflake_sql_api.py b/tests/snowflake/hooks/test_snowflake_sql_api.py
index 58785d6d3..5f7a1ee9e 100644
--- a/tests/snowflake/hooks/test_snowflake_sql_api.py
+++ b/tests/snowflake/hooks/test_snowflake_sql_api.py
@@ -117,7 +117,12 @@ class TestSnowflakeSqlApiHookAsync:
         "sql,statement_count,expected_response, expected_query_ids",
         [
             (SINGLE_STMT, 1, {"statementHandle": "uuid"}, ["uuid"]),
-            (SQL_MULTIPLE_STMTS, 4, {"statementHandles": ["uuid", "uuid1"]}, ["uuid", "uuid1"]),
+            (
+                SQL_MULTIPLE_STMTS,
+                4,
+                {"statementHandles": ["uuid", "uuid1"]},
+                ["uuid", "uuid1"],
+            ),
         ],
     )
     @mock.patch("astronomer.providers.snowflake.hooks.snowflake_sql_api.requests")
@@ -261,7 +266,9 @@ def non_encrypted_temporary_private_key(self, tmp_path: Path) -> Path:
         """Encrypt the pem file from the path"""
         key = rsa.generate_private_key(backend=default_backend(), public_exponent=65537, key_size=2048)
         private_key = key.private_bytes(
-            serialization.Encoding.PEM, serialization.PrivateFormat.PKCS8, serialization.NoEncryption()
+            serialization.Encoding.PEM,
+            serialization.PrivateFormat.PKCS8,
+            serialization.NoEncryption(),
         )
         test_key_file = tmp_path / "test_key.pem"
         test_key_file.write_bytes(private_key)
@@ -297,13 +304,14 @@ def test_get_private_key_should_support_private_auth_in_connection(
             },
         }
         with unittest.mock.patch.dict(
-            "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri()
+            "os.environ",
+            AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri(),
         ):
             hook = SnowflakeSqlApiHookAsync(snowflake_conn_id="test_conn")
             hook.get_private_key()
             assert hook.private_key is not None
 
-    def test_get_private_key_raise_exception(self, encrypted_temporary_private_key: Path):
+    def test_get_private_key_raise_exception(self, encrypted_temporary_private_key: Path, monkeypatch):
         """
         Test get_private_key function with private_key_content and private_key_file in connection
         and raise airflow exception
@@ -321,10 +329,9 @@ def test_get_private_key_raise_exception(self, encrypted_temporary_private_key:
                 "private_key_file": str(encrypted_temporary_private_key),
             },
         }
+        monkeypatch.setenv("AIRFLOW_CONN_TEST_CONN", Connection(**connection_kwargs).get_uri())
         hook = SnowflakeSqlApiHookAsync(snowflake_conn_id="test_conn")
-        with unittest.mock.patch.dict(
-            "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri()
-        ), pytest.raises(
+        with pytest.raises(
             AirflowException,
             match="The private_key_file and private_key_content extra fields are mutually "
             "exclusive. Please remove one.",
@@ -348,7 +355,8 @@ def test_get_private_key_should_support_private_auth_with_encrypted_key(
             },
         }
         with unittest.mock.patch.dict(
-            "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri()
+            "os.environ",
+            AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri(),
         ):
             hook = SnowflakeSqlApiHookAsync(snowflake_conn_id="test_conn")
             hook.get_private_key()
@@ -357,6 +365,7 @@ def test_get_private_key_should_support_private_auth_with_encrypted_key(
     def test_get_private_key_should_support_private_auth_with_unencrypted_key(
         self,
         non_encrypted_temporary_private_key,
+        monkeypatch,
     ):
         connection_kwargs = {
             **BASE_CONNECTION_KWARGS,
@@ -371,22 +380,23 @@ def test_get_private_key_should_support_private_auth_with_unencrypted_key(
             },
         }
         with unittest.mock.patch.dict(
-            "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri()
+            "os.environ",
+            AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri(),
         ):
             hook = SnowflakeSqlApiHookAsync(snowflake_conn_id="test_conn")
             hook.get_private_key()
             assert hook.private_key is not None
 
         connection_kwargs["password"] = ""
         with unittest.mock.patch.dict(
-            "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri()
+            "os.environ",
+            AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri(),
         ):
             hook = SnowflakeSqlApiHookAsync(snowflake_conn_id="test_conn")
             hook.get_private_key()
             assert hook.private_key is not None
 
         connection_kwargs["password"] = _PASSWORD
-        with unittest.mock.patch.dict(
-            "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri()
-        ), pytest.raises(TypeError, match="Password was given but private key is not encrypted."):
+        monkeypatch.setenv("AIRFLOW_CONN_TEST_CONN", Connection(**connection_kwargs).get_uri())
+        with pytest.raises(TypeError, match="Password was given but private key is not encrypted."):
             SnowflakeSqlApiHookAsync(snowflake_conn_id="test_conn").get_private_key()
 
     @pytest.mark.asyncio
@@ -419,9 +429,21 @@
                     "statement_handles": ["uuid", "uuid1"],
                 },
             ),
-            (202, {}, {"status": "running", "message": "Query statements are still running"}),
-            (422, {"status": "error", "message": "test"}, {"status": "error", "message": "test"}),
-            (404, {"status": "error", "message": "test"}, {"status": "error", "message": "test"}),
+            (
+                202,
+                {},
+                {"status": "running", "message": "Query statements are still running"},
+            ),
+            (
+                422,
+                {"status": "error", "message": "test"},
+                {"status": "error", "message": "test"},
+            ),
+            (
+                404,
+                {"status": "error", "message": "test"},
+                {"status": "error", "message": "test"},
+            ),
         ],
     )
     @mock.patch(
         "astronomer.providers.snowflake.hooks.snowflake_sql_api."
@@ -430,7 +452,12 @@
     )
     @mock.patch("astronomer.providers.snowflake.hooks.snowflake_sql_api.aiohttp.ClientSession.get")
     async def test_get_sql_api_query_status(
-        self, mock_get, mock_geturl_header_params, status_code, response, expected_response
+        self,
+        mock_get,
+        mock_geturl_header_params,
+        status_code,
+        response,
+        expected_response,
     ):
         """Test Async get_sql_api_query_status function by mocking the status, response and expected response"""
         req_id = uuid.uuid4()
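Note: for reference, the reflowed `non_encrypted_temporary_private_key` fixture reduces to this `cryptography` call sequence, shown standalone (a sketch; prints only the PEM header):

```python
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa

# Generate a 2048-bit RSA key and serialize it as an unencrypted PKCS#8 PEM,
# the format the SnowflakeSqlApiHookAsync tests feed in as a private key file.
key = rsa.generate_private_key(backend=default_backend(), public_exponent=65537, key_size=2048)
pem = key.private_bytes(
    serialization.Encoding.PEM,
    serialization.PrivateFormat.PKCS8,
    serialization.NoEncryption(),
)
print(pem.decode().splitlines()[0])  # -----BEGIN PRIVATE KEY-----
```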