diff --git a/tests/system/providers/google/cloud/cloud_build/example_cloud_build.py b/tests/system/providers/google/cloud/cloud_build/example_cloud_build.py
index 0b6b25dc6500d..d31c799d95661 100644
--- a/tests/system/providers/google/cloud/cloud_build/example_cloud_build.py
+++ b/tests/system/providers/google/cloud/cloud_build/example_cloud_build.py
@@ -44,7 +44,7 @@
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
-DAG_ID = "example_gcp_cloud_build"
+DAG_ID = "gcp_cloud_build"
 
 GCP_SOURCE_ARCHIVE_URL = "gs://airflow-system-tests-resources/cloud-build/file.tar.gz"
 
 # Repository with this name is expected created within the project $SYSTEM_TESTS_GCP_PROJECT
@@ -52,21 +52,21 @@
 # 1. Create Cloud Source Repository
 # 2. Push into a master branch the following file:
 # tests/system/providers/google/cloud/cloud_build/resources/example_cloud_build.yaml
-GCP_SOURCE_REPOSITORY_NAME = "test-cloud-build-repo"
+GCP_SOURCE_REPOSITORY_NAME = "test-cloud-build-repository"
 
 CURRENT_FOLDER = Path(__file__).parent
 
 # [START howto_operator_gcp_create_build_from_storage_body]
 CREATE_BUILD_FROM_STORAGE_BODY = {
     "source": {"storage_source": GCP_SOURCE_ARCHIVE_URL},
-    "steps": [{"name": "ubuntu", "args": ["echo", "Hello world"]}],
+    "steps": [{"name": "ubuntu", "args": ["echo", "Hello world", "sleep 200"]}],
 }
 # [END howto_operator_gcp_create_build_from_storage_body]
 
 # [START howto_operator_create_build_from_repo_body]
 CREATE_BUILD_FROM_REPO_BODY: dict[str, Any] = {
     "source": {"repo_source": {"repo_name": GCP_SOURCE_REPOSITORY_NAME, "branch_name": "master"}},
-    "steps": [{"name": "ubuntu", "args": ["echo", "Hello world"]}],
+    "steps": [{"name": "ubuntu", "args": ["echo", "Hello world", "sleep 200"]}],
 }
 # [END howto_operator_create_build_from_repo_body]
 
@@ -76,7 +76,7 @@
     schedule="@once",
     start_date=datetime(2021, 1, 1),
     catchup=False,
-    tags=["example"],
+    tags=["example", "cloud_build"],
 ) as dag:
 
     @task_group(group_id="build_from_storage")
diff --git a/tests/system/providers/google/cloud/cloud_build/example_cloud_build_trigger.py b/tests/system/providers/google/cloud/cloud_build/example_cloud_build_trigger.py
index fa8bf9b7f3be7..ac58a1c567d46 100644
--- a/tests/system/providers/google/cloud/cloud_build/example_cloud_build_trigger.py
+++ b/tests/system/providers/google/cloud/cloud_build/example_cloud_build_trigger.py
@@ -25,9 +25,14 @@
 from datetime import datetime
 from typing import Any, cast
 
+from googleapiclient.discovery import build
+from googleapiclient.errors import HttpError
+
+from airflow.exceptions import AirflowException
 from airflow.models.baseoperator import chain
 from airflow.models.dag import DAG
 from airflow.models.xcom_arg import XComArg
+from airflow.operators.python import PythonOperator
 from airflow.providers.google.cloud.operators.cloud_build import (
     CloudBuildCreateBuildTriggerOperator,
     CloudBuildDeleteBuildTriggerOperator,
@@ -42,16 +47,17 @@
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
-DAG_ID = "example_gcp_cloud_build_trigger"
+DAG_ID = "gcp_cloud_build_trigger"
 
 # Repository with this name is expected created within the project $SYSTEM_TESTS_GCP_PROJECT
 # If you'd like to run this system test locally, please
 # 1. Create Cloud Source Repository
 # 2. Push into a master branch the following file:
 # tests/system/providers/google/cloud/cloud_build/resources/example_cloud_build.yaml
-GCP_SOURCE_REPOSITORY_NAME = "test-cloud-build-repo"
+GCP_SOURCE_REPOSITORY_NAME = "test-cloud-build-repository"
 
-TRIGGER_NAME = f"cloud-build-trigger-{ENV_ID}"
+TRIGGER_NAME = f"cloud-build-trigger-{ENV_ID}".replace("_", "-")
+PROJECT_NUMBER = "{{ task_instance.xcom_pull('get_project_number') }}"
 
 # [START howto_operator_gcp_create_build_trigger_body]
 create_build_trigger_body = {
@@ -59,9 +65,10 @@
     "trigger_template": {
         "project_id": PROJECT_ID,
         "repo_name": GCP_SOURCE_REPOSITORY_NAME,
-        "branch_name": "main",
+        "branch_name": "master",
     },
     "filename": "example_cloud_build.yaml",
+    "service_account": f"projects/{PROJECT_ID}/serviceAccounts/{PROJECT_NUMBER}-compute@developer.gserviceaccount.com",
 }
 # [END howto_operator_gcp_create_build_trigger_body]
 
@@ -74,23 +81,40 @@
         "branch_name": "master",
     },
     "filename": "example_cloud_build.yaml",
+    "service_account": f"projects/{PROJECT_ID}/serviceAccounts/{PROJECT_NUMBER}-compute@developer.gserviceaccount.com",
 }
 # [END START howto_operator_gcp_update_build_trigger_body]
 
 # [START howto_operator_create_build_from_repo_body]
 create_build_from_repo_body: dict[str, Any] = {
     "source": {"repo_source": {"repo_name": GCP_SOURCE_REPOSITORY_NAME, "branch_name": "master"}},
-    "steps": [{"name": "ubuntu", "args": ["echo", "Hello world"]}],
+    "steps": [{"name": "ubuntu", "args": ["echo", "Hello world", "sleep 200"]}],
 }
 # [END howto_operator_create_build_from_repo_body]
 
 
+def get_project_number():
+    """Helper function to retrieve the number of the project based on PROJECT_ID"""
+    try:
+        with build("cloudresourcemanager", "v1") as service:
+            response = service.projects().get(projectId=PROJECT_ID).execute()
+            return response["projectNumber"]
+    except HttpError as exc:
+        if exc.status_code == 403:
+            raise AirflowException(
+                "No project found with specified name, "
+                "or caller does not have permissions to read specified project"
+            )
+        else:
+            raise exc
+
+
 with DAG(
     DAG_ID,
     schedule="@once",
     start_date=datetime(2021, 1, 1),
     catchup=False,
-    tags=["example"],
+    tags=["example", "cloud_build_trigger"],
 ) as dag:
     # [START howto_operator_create_build_trigger]
     create_build_trigger = CloudBuildCreateBuildTriggerOperator(
@@ -100,6 +124,11 @@
 
     build_trigger_id = cast(str, XComArg(create_build_trigger, key="id"))
 
+    get_project_number = PythonOperator(
+        task_id="get_project_number",
+        python_callable=get_project_number,
+    )
+
     # [START howto_operator_run_build_trigger]
     run_build_trigger = CloudBuildRunBuildTriggerOperator(
         task_id="run_build_trigger",
@@ -145,6 +174,7 @@
     # [END howto_operator_list_build_triggers]
 
     chain(
+        get_project_number,
        create_build_trigger,
         run_build_trigger,
         update_build_trigger,
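
For context on the new `get_project_number` task and the `service_account` field it feeds: the sketch below shows the same Cloud Resource Manager lookup outside Airflow and how the resulting number forms the default Compute Engine service account string used in the trigger bodies. This is a minimal standalone sketch, not part of the patch; it assumes `google-api-python-client` is installed and Application Default Credentials are configured, and the `lookup_project_number` name and `RuntimeError` (standing in for `AirflowException`) are illustrative only.

```python
# Standalone sketch; assumes google-api-python-client and ADC are set up.
import os

from googleapiclient.discovery import build
from googleapiclient.errors import HttpError

# Mirrors the system-test convention above; the fallback value is a placeholder.
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "my-sample-project")


def lookup_project_number(project_id: str) -> str:
    """Return the numeric project identifier, as get_project_number does in the DAG."""
    try:
        with build("cloudresourcemanager", "v1") as service:
            response = service.projects().get(projectId=project_id).execute()
        return response["projectNumber"]
    except HttpError as exc:
        # The API answers 403 both when the project does not exist and when the
        # caller lacks permission to read it, matching the DAG helper's handling.
        if exc.status_code == 403:
            raise RuntimeError(
                f"Project {project_id} not found or caller lacks permission to read it"
            ) from exc
        raise


if __name__ == "__main__":
    number = lookup_project_number(PROJECT_ID)
    # The default Compute Engine service account that the new service_account
    # field points the triggered builds at:
    print(f"projects/{PROJECT_ID}/serviceAccounts/{number}-compute@developer.gserviceaccount.com")
```

In the DAG itself this lookup runs as a `PythonOperator` task, and the `PROJECT_NUMBER` Jinja template pulls its XCom result at render time, so the service account string is only resolved when the trigger bodies are actually used.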