Skip to content

Commit

Permalink
Fix resource naming conflict in Dataplex system tests (apache#38737)
Browse files — browse the repository at this point in the history
  • Loading branch information
e-galan authored Apr 5, 2024
1 parent 8712674 commit 844cb8b
Show file tree
Hide file tree
Showing 2 changed files with 26 additions and 29 deletions.
28 changes: 13 additions & 15 deletions tests/system/providers/google/cloud/dataplex/example_dataplex_dp.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,10 +56,10 @@

DAG_ID = "example_dataplex_data_profile"

LAKE_ID = f"test-lake-{ENV_ID}"
LAKE_ID = f"lake-{DAG_ID}-{ENV_ID}".replace("_", "-")
REGION = "us-central1"

DATASET_NAME = f"dataset_bq_{ENV_ID}"
DATASET = f"dataset_bq_{DAG_ID}_{ENV_ID}"

TABLE_1 = "table0"
TABLE_2 = "table1"
Expand All @@ -70,15 +70,14 @@
{"name": "dt", "type": "STRING", "mode": "NULLABLE"},
]

DATASET = DATASET_NAME
INSERT_DATE = datetime.now().strftime("%Y-%m-%d")
INSERT_ROWS_QUERY = f"INSERT {DATASET}.{TABLE_1} VALUES (1, 'test test2', '{INSERT_DATE}');"
LOCATION = "us"

TRIGGER_SPEC_TYPE = "ON_DEMAND"

ZONE_ID = "test-zone-id"
DATA_SCAN_ID = "test-data-scan-id"
ZONE_ID = f"zone-id-{DAG_ID}-{ENV_ID}".replace("_", "-")
DATA_SCAN_ID = f"data-scan-id-{DAG_ID}-{ENV_ID}".replace("_", "-")

EXAMPLE_LAKE_BODY = {
"display_name": "test_display_name",
Expand All @@ -94,11 +93,11 @@
}
# [END howto_dataplex_zone_configuration]

ASSET_ID = "test-asset-id"
ASSET_ID = f"asset-id-{DAG_ID}-{ENV_ID}".replace("_", "-")

# [START howto_dataplex_asset_configuration]
EXAMPLE_ASSET = {
"resource_spec": {"name": f"projects/{PROJECT_ID}/datasets/{DATASET_NAME}", "type_": "BIGQUERY_DATASET"},
"resource_spec": {"name": f"projects/{PROJECT_ID}/datasets/{DATASET}", "type_": "BIGQUERY_DATASET"},
"discovery_spec": {"enabled": True},
}
# [END howto_dataplex_asset_configuration]
Expand Down Expand Up @@ -130,17 +129,17 @@
schedule="@once",
tags=["example", "dataplex", "data_profile"],
) as dag:
create_dataset = BigQueryCreateEmptyDatasetOperator(task_id="create_dataset", dataset_id=DATASET_NAME)
create_dataset = BigQueryCreateEmptyDatasetOperator(task_id="create_dataset", dataset_id=DATASET)
create_table_1 = BigQueryCreateEmptyTableOperator(
task_id="create_table_1",
dataset_id=DATASET_NAME,
dataset_id=DATASET,
table_id=TABLE_1,
schema_fields=SCHEMA,
location=LOCATION,
)
create_table_2 = BigQueryCreateEmptyTableOperator(
task_id="create_table_2",
dataset_id=DATASET_NAME,
dataset_id=DATASET,
table_id=TABLE_2,
schema_fields=SCHEMA,
location=LOCATION,
Expand Down Expand Up @@ -265,28 +264,28 @@
lake_id=LAKE_ID,
zone_id=ZONE_ID,
asset_id=ASSET_ID,
trigger_rule=TriggerRule.ALL_DONE,
)
# [END howto_dataplex_delete_asset_operator]
delete_asset.trigger_rule = TriggerRule.ALL_DONE
# [START howto_dataplex_delete_zone_operator]
delete_zone = DataplexDeleteZoneOperator(
task_id="delete_zone",
project_id=PROJECT_ID,
region=REGION,
lake_id=LAKE_ID,
zone_id=ZONE_ID,
trigger_rule=TriggerRule.ALL_DONE,
)
# [END howto_dataplex_delete_zone_operator]
delete_zone.trigger_rule = TriggerRule.ALL_DONE
# [START howto_dataplex_delete_data_profile_operator]
delete_data_scan = DataplexDeleteDataProfileScanOperator(
task_id="delete_data_scan",
project_id=PROJECT_ID,
region=REGION,
data_scan_id=DATA_SCAN_ID,
trigger_rule=TriggerRule.ALL_DONE,
)
# [END howto_dataplex_delete_data_profile_operator]
delete_data_scan.trigger_rule = TriggerRule.ALL_DONE
delete_lake = DataplexDeleteLakeOperator(
project_id=PROJECT_ID,
region=REGION,
Expand All @@ -296,7 +295,7 @@
)
delete_dataset = BigQueryDeleteDatasetOperator(
task_id="delete_dataset",
dataset_id=DATASET_NAME,
dataset_id=DATASET,
project_id=PROJECT_ID,
delete_contents=True,
trigger_rule=TriggerRule.ALL_DONE,
Expand All @@ -321,7 +320,6 @@
get_data_scan_job_result_2,
run_data_scan_def,
run_data_scan_async_2,
# get_data_scan_job_result_def,
# TEST TEARDOWN
delete_asset,
delete_zone,
Expand Down
27 changes: 13 additions & 14 deletions tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,10 +56,10 @@

DAG_ID = "example_dataplex_data_quality"

LAKE_ID = f"test-lake-{ENV_ID}"
LAKE_ID = f"lake-{DAG_ID}-{ENV_ID}".replace("_", "-")
REGION = "us-central1"

DATASET_NAME = f"dataset_bq_{ENV_ID}"
DATASET = f"dataset_bq_{DAG_ID}_{ENV_ID}"

TABLE_1 = "table0"
TABLE_2 = "table1"
Expand All @@ -70,15 +70,14 @@
{"name": "dt", "type": "STRING", "mode": "NULLABLE"},
]

DATASET = DATASET_NAME
INSERT_DATE = datetime.now().strftime("%Y-%m-%d")
INSERT_ROWS_QUERY = f"INSERT {DATASET}.{TABLE_1} VALUES (1, 'test test2', '{INSERT_DATE}');"
LOCATION = "us"

TRIGGER_SPEC_TYPE = "ON_DEMAND"

ZONE_ID = "test-zone-id"
DATA_SCAN_ID = "test-data-scan-id"
ZONE_ID = f"zone-id-{DAG_ID}-{ENV_ID}".replace("_", "-")
DATA_SCAN_ID = f"data-scan-id-{DAG_ID}-{ENV_ID}".replace("_", "-")

EXAMPLE_LAKE_BODY = {
"display_name": "test_display_name",
Expand All @@ -94,11 +93,11 @@
}
# [END howto_dataplex_zone_configuration]

ASSET_ID = "test-asset-id"
ASSET_ID = f"asset-id-{DAG_ID}-{ENV_ID}".replace("_", "-")

# [START howto_dataplex_asset_configuration]
EXAMPLE_ASSET = {
"resource_spec": {"name": f"projects/{PROJECT_ID}/datasets/{DATASET_NAME}", "type_": "BIGQUERY_DATASET"},
"resource_spec": {"name": f"projects/{PROJECT_ID}/datasets/{DATASET}", "type_": "BIGQUERY_DATASET"},
"discovery_spec": {"enabled": True},
}
# [END howto_dataplex_asset_configuration]
Expand Down Expand Up @@ -154,17 +153,17 @@
schedule="@once",
tags=["example", "dataplex", "data_quality"],
) as dag:
create_dataset = BigQueryCreateEmptyDatasetOperator(task_id="create_dataset", dataset_id=DATASET_NAME)
create_dataset = BigQueryCreateEmptyDatasetOperator(task_id="create_dataset", dataset_id=DATASET)
create_table_1 = BigQueryCreateEmptyTableOperator(
task_id="create_table_1",
dataset_id=DATASET_NAME,
dataset_id=DATASET,
table_id=TABLE_1,
schema_fields=SCHEMA,
location=LOCATION,
)
create_table_2 = BigQueryCreateEmptyTableOperator(
task_id="create_table_2",
dataset_id=DATASET_NAME,
dataset_id=DATASET,
table_id=TABLE_2,
schema_fields=SCHEMA,
location=LOCATION,
Expand Down Expand Up @@ -298,28 +297,28 @@
lake_id=LAKE_ID,
zone_id=ZONE_ID,
asset_id=ASSET_ID,
trigger_rule=TriggerRule.ALL_DONE,
)
# [END howto_dataplex_delete_asset_operator]
delete_asset.trigger_rule = TriggerRule.ALL_DONE
# [START howto_dataplex_delete_zone_operator]
delete_zone = DataplexDeleteZoneOperator(
task_id="delete_zone",
project_id=PROJECT_ID,
region=REGION,
lake_id=LAKE_ID,
zone_id=ZONE_ID,
trigger_rule=TriggerRule.ALL_DONE,
)
# [END howto_dataplex_delete_zone_operator]
delete_zone.trigger_rule = TriggerRule.ALL_DONE
# [START howto_dataplex_delete_data_quality_operator]
delete_data_scan = DataplexDeleteDataQualityScanOperator(
task_id="delete_data_scan",
project_id=PROJECT_ID,
region=REGION,
data_scan_id=DATA_SCAN_ID,
trigger_rule=TriggerRule.ALL_DONE,
)
# [END howto_dataplex_delete_data_quality_operator]
delete_data_scan.trigger_rule = TriggerRule.ALL_DONE
delete_lake = DataplexDeleteLakeOperator(
project_id=PROJECT_ID,
region=REGION,
Expand All @@ -329,7 +328,7 @@
)
delete_dataset = BigQueryDeleteDatasetOperator(
task_id="delete_dataset",
dataset_id=DATASET_NAME,
dataset_id=DATASET,
project_id=PROJECT_ID,
delete_contents=True,
trigger_rule=TriggerRule.ALL_DONE,
Expand Down

0 comments on commit 844cb8b

Please sign in to comment.