Commit

small fixes for system tests
Ulada Zakharava committed Oct 1, 2024
1 parent 8f7616c commit 4ce272b
Showing 22 changed files with 46 additions and 37 deletions.
@@ -46,7 +46,7 @@

ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default")
DAG_ID = "example_automl_video_clss"
DAG_ID = "automl_video_clss"
REGION = "us-central1"
VIDEO_DISPLAY_NAME = f"auto-ml-video-clss-{ENV_ID}"
MODEL_DISPLAY_NAME = f"auto-ml-video-clss-model-{ENV_ID}"
@@ -46,7 +46,7 @@

ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default")
DAG_ID = "example_automl_video_track"
DAG_ID = "automl_video_track"
REGION = "us-central1"
VIDEO_DISPLAY_NAME = f"auto-ml-video-tracking-{ENV_ID}"
MODEL_DISPLAY_NAME = f"auto-ml-video-tracking-model-{ENV_ID}"
@@ -46,10 +46,10 @@
from airflow.utils.trigger_rule import TriggerRule
from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

DAG_ID = "example_gcp_bigquery_dts"
DAG_ID = "gcp_bigquery_dts"

BUCKET_NAME = f"bucket-{DAG_ID}-{ENV_ID}"

@@ -41,7 +41,7 @@
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default")

DAG_ID = "example_bigquery_queries_async"
DAG_ID = "bigquery_queries_async"

DATASET_NAME = f"dataset_{DAG_ID}_{ENV_ID}".replace("-", "_")
LOCATION = "us"
@@ -39,7 +39,7 @@

ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default")
DAG_ID = "example_bigquery_sensors"
DAG_ID = "bigquery_sensors"

DATASET_NAME = f"dataset_{DAG_ID}_{ENV_ID}".replace("-", "_")
TABLE_NAME = f"partitioned_table_{DAG_ID}_{ENV_ID}".replace("-", "_")
@@ -38,7 +38,7 @@

ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
DAG_ID = "example_bigquery_transfer"
DAG_ID = "bigquery_transfer"

DATASET_NAME = f"dataset_{DAG_ID}_{ENV_ID}"
BUCKET_NAME = f"bucket_{DAG_ID}_{ENV_ID}"
@@ -38,7 +38,7 @@

PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
DAG_ID = "example_gcp_function"
DAG_ID = "gcp_function"
LOCATION = "europe-west1"

# make sure there are no dashes in function name (!)
@@ -50,11 +50,11 @@ def _create_service():


with DAG(
"example_cloud_run_service",
"cloud_run_service",
schedule="@once",
start_date=datetime(2021, 1, 1),
catchup=False,
tags=["cloud-run-service-example"],
tags=["cloud-run", "service", "example"],
) as dag:
# [START howto_operator_cloud_run_create_service]
create_cloud_run_service = CloudRunCreateServiceOperator(
@@ -378,7 +378,7 @@ def cloud_sql_database_create_body(instance: str) -> dict[str, Any]:
with DAG(
dag_id=DAG_ID,
start_date=datetime(2021, 1, 1),
-schedule=None,
+schedule="@once",
catchup=False,
tags=["example", "cloudsql", "postgres"],
) as dag:
@@ -257,7 +257,7 @@ def cloud_sql_database_create_body(instance: str) -> dict[str, Any]:
with DAG(
dag_id=DAG_ID,
start_date=datetime(2021, 1, 1),
-schedule=None,
+schedule="@once",
catchup=False,
tags=["example", "cloudsql", "postgres"],
) as dag:
@@ -37,7 +37,7 @@
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default")

DAG_ID = "example_composer"
DAG_ID = "composer"
REGION = "us-central1"

# [START howto_operator_composer_simple_environment]
@@ -105,7 +105,7 @@
},
job_class="org.apache.beam.examples.WordCount",
dataflow_config={
"job_name": "test-java-pipeline-job",
"job_name": f"java-pipeline-job-{ENV_ID}",
"check_if_running": CheckJobRunning.IgnoreJob,
"location": LOCATION,
"poll_sleep": 10,
@@ -124,7 +124,7 @@
},
job_class="org.apache.beam.examples.WordCount",
dataflow_config={
"job_name": "test-deferrable-java-pipeline-job",
"job_name": f"deferrable-java-pipeline-job-{ENV_ID}",
"check_if_running": CheckJobRunning.WaitForRun,
"location": LOCATION,
"poll_sleep": 10,
@@ -38,7 +38,7 @@

PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
DAG_ID = "example_dataflow_sql"
DAG_ID = "dataflow_sql"
LOCATION = "europe-west3"
DATAFLOW_SQL_JOB_NAME = f"{DAG_ID}_{ENV_ID}".replace("_", "-")
BQ_SQL_DATASET = f"{DAG_ID}_{ENV_ID}".replace("-", "_")
@@ -57,7 +57,7 @@
with DAG(
dag_id=DAG_ID,
start_date=datetime(2021, 1, 1),
-schedule=None,
+schedule="@once",
catchup=False,
tags=["example", "dataflow-sql"],
) as dag:
@@ -83,6 +83,7 @@
with DAG(
dag_id=DAG_ID,
start_date=datetime(2021, 1, 1),
schedule="@once",
catchup=False,
tags=["example", "dataflow", "yaml"],
) as dag:
tests/system/providers/google/cloud/gcs/example_firestore.py (4 changes: 2 additions & 2 deletions)
@@ -43,9 +43,9 @@
from airflow.providers.google.firebase.operators.firestore import CloudFirestoreExportDatabaseOperator
from airflow.utils.trigger_rule import TriggerRule

-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default")
DAG_ID = "example_firestore_to_gcp"
DAG_ID = "firestore_to_gcp"

BUCKET_NAME = f"bucket_{DAG_ID}_{ENV_ID}"

@@ -36,8 +36,8 @@
)
from airflow.utils.trigger_rule import TriggerRule

-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
-DAG_ID = "example_kubernetes_engine_kueue"
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
+DAG_ID = "kubernetes_engine_kueue"
GCP_PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default")

GCP_LOCATION = "europe-west3"
@@ -167,12 +167,15 @@
)

(
+# TEST SETUP
create_cluster
>> add_kueue_cluster
>> create_resource_flavor
>> create_cluster_queue
>> create_local_queue
+# TEST BODY
>> kueue_job_task
+# TEST TEARDOWN
>> delete_cluster
)

@@ -29,9 +29,9 @@
from airflow.utils.trigger_rule import TriggerRule
from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
DAG_ID = "example_life_sciences"
DAG_ID = "life_sciences"

BUCKET_NAME = f"bucket_{DAG_ID}-{ENV_ID}"

@@ -91,7 +91,7 @@
schedule="@once",
start_date=datetime(2021, 1, 1),
catchup=False,
tags=["example"],
tags=["example", "life-sciences"],
) as dag:
create_bucket = GCSCreateBucketOperator(task_id="create_bucket", bucket_name=BUCKET_NAME)

@@ -35,8 +35,8 @@
CloudNaturalLanguageClassifyTextOperator,
)

-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
-DAG_ID = "example_gcp_natural_language"
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
+DAG_ID = "gcp_natural_language"

# [START howto_operator_gcp_natural_language_document_text]
TEXT = """Airflow is a platform to programmatically author, schedule and monitor workflows.
@@ -60,7 +60,7 @@
schedule="@once", # Override to match your needs
start_date=datetime(2021, 1, 1),
catchup=False,
tags=["example"],
tags=["example", "natural-language"],
) as dag:
# [START howto_operator_gcp_natural_language_analyze_entities]
analyze_entities = CloudNaturalLanguageAnalyzeEntitiesOperator(
@@ -119,6 +119,12 @@
analyze_sentiment >> analyze_sentiment_result
analyze_classify_text >> analyze_classify_text_result

+from tests.system.utils.watcher import watcher
+
+# This test needs watcher in order to properly mark success/failure
+# when "teardown" task with trigger rule is part of the DAG
+list(dag.tasks) >> watcher()

from tests.system.utils import get_test_run # noqa: E402

# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
@@ -28,14 +28,14 @@
from airflow.operators.bash import BashOperator
from airflow.providers.google.cloud.operators.translate import CloudTranslateTextOperator

DAG_ID = "example_gcp_translate"
DAG_ID = "gcp_translate"

with DAG(
DAG_ID,
schedule="@once", # Override to match your needs
start_date=datetime(2021, 1, 1),
catchup=False,
tags=["example"],
tags=["example", "translate"],
) as dag:
# [START howto_operator_translate_text]
product_set_create = CloudTranslateTextOperator(
@@ -28,10 +28,10 @@
from airflow.providers.google.cloud.operators.translate_speech import CloudTranslateSpeechOperator
from airflow.utils.trigger_rule import TriggerRule

-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default")

DAG_ID = "example_gcp_translate_speech"
DAG_ID = "gcp_translate_speech"

BUCKET_NAME = f"bucket_{DAG_ID}_{ENV_ID}"

@@ -60,7 +60,7 @@
schedule="@once", # Override to match your needs
start_date=datetime(2021, 1, 1),
catchup=False,
tags=["example"],
tags=["example", "translate-speech"],
) as dag:
create_bucket = GCSCreateBucketOperator(task_id="create_bucket", bucket_name=BUCKET_NAME)

@@ -72,12 +72,11 @@ def TABULAR_DATASET(bucket_name):
MODEL_SERVING_CONTAINER_URI = "gcr.io/cloud-aiplatform/prediction/tf2-cpu.2-2:latest"
REPLICA_COUNT = 1

-# VERTEX_AI_LOCAL_TRAINING_SCRIPT_PATH should be set for Airflow which is running on distributed system.
+# LOCAL_TRAINING_SCRIPT_PATH should be set for Airflow which is running on distributed system.
# For example in Composer the correct path is `gcs/data/california_housing_training_script.py`.
# Because `gcs/data/` is shared folder for Airflow's workers.
-LOCAL_TRAINING_SCRIPT_PATH = os.environ.get(
-    "VERTEX_AI_LOCAL_TRAINING_SCRIPT_PATH", "california_housing_training_script.py"
-)
+IS_COMPOSER = bool(os.environ.get("COMPOSER_ENVIRONMENT", ""))
+LOCAL_TRAINING_SCRIPT_PATH = "gcs/data/california_housing_training_script.py" if IS_COMPOSER else ""


with DAG(
@@ -41,7 +41,7 @@
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default")

DAG_ID = "example_cloud_workflows"
DAG_ID = "cloud_workflows"

LOCATION = "us-central1"
WORKFLOW_ID = f"workflow-{DAG_ID}-{ENV_ID}".replace("_", "-")