Skip to content

Commit

Permalink
Resolve PT012 in apache.spark, fab, ftp, openai and `papermill` providers tests (apache#38272)
Browse files Browse the repository at this point in the history
  • Loading branch information
Taragolis authored Mar 19, 2024
1 parent dd2b42b commit cd79958
Show file tree
Hide file tree
Showing 6 changed files with 37 additions and 35 deletions.
6 changes: 1 addition & 5 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -1415,7 +1415,7 @@ combine-as-imports = true
"tests/providers/elasticsearch/log/elasticmock/__init__.py" = ["E402"]
"tests/providers/elasticsearch/log/elasticmock/utilities/__init__.py" = ["E402"]
"tests/providers/openai/hooks/test_openai.py" = ["E402"]
"tests/providers/openai/operators/test_openai.py" = ["E402", "PT012"]
"tests/providers/openai/operators/test_openai.py" = ["E402"]
"tests/providers/qdrant/hooks/test_qdrant.py" = ["E402"]
"tests/providers/qdrant/operators/test_qdrant.py" = ["E402"]
"tests/providers/snowflake/operators/test_snowflake_sql.py" = ["E402"]
Expand Down Expand Up @@ -1504,7 +1504,6 @@ combine-as-imports = true
"tests/providers/apache/beam/hooks/test_beam.py" = ["PT012"]
"tests/providers/apache/hive/hooks/test_hive.py" = ["PT012"]
"tests/providers/apache/hive/sensors/test_named_hive_partition.py" = ["PT012"]
"tests/providers/apache/spark/hooks/test_spark_sql.py" = ["PT012"]
"tests/providers/celery/sensors/test_celery_queue.py" = ["PT012"]
"tests/providers/cncf/kubernetes/executors/test_kubernetes_executor.py" = ["PT012"]
"tests/providers/cncf/kubernetes/hooks/test_kubernetes.py" = ["PT012"]
Expand All @@ -1518,8 +1517,6 @@ combine-as-imports = true
"tests/providers/databricks/sensors/test_databricks_partition.py" = ["PT012"]
"tests/providers/datadog/sensors/test_datadog.py" = ["PT012"]
"tests/providers/dbt/cloud/operators/test_dbt.py" = ["PT012"]
"tests/providers/fab/auth_manager/cli_commands/test_user_command.py" = ["PT012"]
"tests/providers/ftp/operators/test_ftp.py" = ["PT012"]
"tests/providers/google/cloud/hooks/test_bigquery.py" = ["PT012"]
"tests/providers/google/cloud/hooks/test_cloud_storage_transfer_service.py" = ["PT012"]
"tests/providers/google/cloud/hooks/test_dataflow.py" = ["PT012"]
Expand Down Expand Up @@ -1548,7 +1545,6 @@ combine-as-imports = true
"tests/providers/microsoft/azure/hooks/test_wasb.py" = ["PT012"]
"tests/providers/microsoft/psrp/hooks/test_psrp.py" = ["PT012"]
"tests/providers/oracle/hooks/test_oracle.py" = ["PT012"]
"tests/providers/papermill/operators/test_papermill.py" = ["PT012"]
"tests/providers/sftp/hooks/test_sftp.py" = ["PT012"]
"tests/providers/sftp/operators/test_sftp.py" = ["PT012"]
"tests/providers/sftp/sensors/test_sftp.py" = ["PT012"]
Expand Down
10 changes: 5 additions & 5 deletions tests/providers/apache/spark/hooks/test_spark_sql.py
Original file line number Diff line number Diff line change
Expand Up @@ -202,12 +202,12 @@ def test_spark_process_runcmd_and_fail(self, mock_popen):
mock_popen.return_value.wait.return_value = status

# When
hook = SparkSqlHook(
conn_id="spark_default",
sql=sql,
master=master,
)
with pytest.raises(AirflowException) as ctx:
hook = SparkSqlHook(
conn_id="spark_default",
sql=sql,
master=master,
)
hook.run_query(params)

# Then
Expand Down
29 changes: 18 additions & 11 deletions tests/providers/fab/auth_manager/cli_commands/test_user_command.py
Original file line number Diff line number Diff line change
Expand Up @@ -404,21 +404,28 @@ def test_cli_remove_user_role(self, create_user_test4):
), "User should have been removed from role 'Viewer'"

@pytest.mark.parametrize(
"action, role, message",
"role, message",
[
["add-role", "Viewer", 'User "test4" is already a member of role "Viewer"'],
["add-role", "Foo", '"Foo" is not a valid role. Valid roles are'],
["remove-role", "Admin", 'User "test4" is not a member of role "Admin"'],
["remove-role", "Foo", '"Foo" is not a valid role. Valid roles are'],
["Viewer", 'User "test4" is already a member of role "Viewer"'],
["Foo", '"Foo" is not a valid role. Valid roles are'],
],
)
def test_cli_manage_roles_exceptions(self, create_user_test4, action, role, message):
args = self.parser.parse_args(["users", action, "--username", "test4", "--role", role])
def test_cli_manage_add_role_exceptions(self, create_user_test4, role, message):
args = self.parser.parse_args(["users", "add-role", "--username", "test4", "--role", role])
with pytest.raises(SystemExit, match=message):
if action == "add-role":
user_command.add_role(args)
else:
user_command.remove_role(args)
user_command.add_role(args)

@pytest.mark.parametrize(
"role, message",
[
["Admin", 'User "test4" is not a member of role "Admin"'],
["Foo", '"Foo" is not a valid role. Valid roles are'],
],
)
def test_cli_manage_remove_role_exceptions(self, create_user_test4, role, message):
args = self.parser.parse_args(["users", "remove-role", "--username", "test4", "--role", role])
with pytest.raises(SystemExit, match=message):
user_command.remove_role(args)

@pytest.mark.parametrize(
"user, message",
Expand Down
16 changes: 8 additions & 8 deletions tests/providers/ftp/operators/test_ftp.py
Original file line number Diff line number Diff line change
Expand Up @@ -152,15 +152,15 @@ def test_arg_checking(self, mock_put):
assert task_0.ftp_conn_id == DEFAULT_CONN_ID

# Exception should be raised if operation is invalid
task_1 = FTPFileTransmitOperator(
task_id="test_ftp_args_1",
ftp_conn_id=DEFAULT_CONN_ID,
local_filepath=self.test_local_filepath,
remote_filepath=self.test_remote_filepath,
operation="invalid_operation",
dag=dag,
)
with pytest.raises(TypeError, match="Unsupported operation value invalid_operation, "):
task_1 = FTPFileTransmitOperator(
task_id="test_ftp_args_1",
ftp_conn_id=DEFAULT_CONN_ID,
local_filepath=self.test_local_filepath,
remote_filepath=self.test_remote_filepath,
operation="invalid_operation",
dag=dag,
)
task_1.execute(None)

def test_unequal_local_remote_file_paths(self):
Expand Down
8 changes: 4 additions & 4 deletions tests/providers/openai/operators/test_openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,9 +42,9 @@ def test_execute_with_input_text():

@pytest.mark.parametrize("invalid_input", ["", None, 123])
def test_execute_with_invalid_input(invalid_input):
operator = OpenAIEmbeddingOperator(
task_id="TaskId", conn_id="test_conn_id", model="test_model", input_text=invalid_input
)
context = Context()
with pytest.raises(ValueError):
operator = OpenAIEmbeddingOperator(
task_id="TaskId", conn_id="test_conn_id", model="test_model", input_text=invalid_input
)
context = Context()
operator.execute(context)
3 changes: 1 addition & 2 deletions tests/providers/papermill/operators/test_papermill.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@

import pytest

from airflow.providers.papermill.operators.papermill import PapermillOperator
from airflow.utils import timezone

DEFAULT_DATE = timezone.datetime(2021, 1, 1)
Expand All @@ -44,8 +45,6 @@ class TestPapermillOperator:
def test_mandatory_attributes(self):
"""Test missing Input or Output notebooks."""
with pytest.raises(ValueError, match="Input notebook is not specified"):
from airflow.providers.papermill.operators.papermill import PapermillOperator

PapermillOperator(task_id="missing_input_nb", output_nb="foo-bar")

with pytest.raises(ValueError, match="Output notebook is not specified"):
Expand Down

0 comments on commit cd79958

Please sign in to comment.