
without asset in internal
laysabit committed May 7, 2024
1 parent d4a1cc9 commit 1ca3a1e
Showing 1 changed file with 2 additions and 29 deletions.
dags/history_tables_dag.py (31 changes: 2 additions & 29 deletions)
@@ -204,9 +204,7 @@
 delete_old_ledger_pub_task = build_delete_data_task(
     dag, public_project, public_dataset, table_names["ledgers"], "pub"
 )
-delete_old_asset_task = build_delete_data_task(
-    dag, internal_project, internal_dataset, table_names["assets"]
-)
+
 delete_old_asset_pub_task = build_delete_data_task(
     dag, public_project, public_dataset, table_names["assets"], "pub"
 )
@@ -216,17 +214,6 @@
 The send tasks receive the location of the file in Google Cloud Storage through Airflow's XCom system.
 Then, the task merges the unique entries in the file into the corresponding table in BigQuery.
 """
-send_assets_to_bq_task = build_gcs_to_bq_task(
-    dag,
-    asset_export_task.task_id,
-    internal_project,
-    internal_dataset,
-    table_names["assets"],
-    "",
-    partition=True,
-    cluster=True,
-)
-
 
 """
 Load final public dataset, crypto-stellar
@@ -355,15 +342,7 @@
         insert_enriched_hist_pub_task,
     ]
 )
-dedup_assets_bq_task = build_bq_insert_job(
-    dag,
-    internal_project,
-    internal_dataset,
-    table_names["assets"],
-    partition=True,
-    cluster=True,
-    create=True,
-)
+
 dedup_assets_pub_task = build_bq_insert_job(
     dag,
     public_project,
@@ -386,12 +365,6 @@
     time_task
     >> write_asset_stats
     >> asset_export_task
-    >> delete_old_asset_task
-    >> send_assets_to_bq_task
-    >> dedup_assets_bq_task
-)
-(
-    asset_export_task
     >> delete_old_asset_pub_task
     >> send_assets_to_pub_task
     >> dedup_assets_pub_task
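
As context for the docstring above: a minimal sketch of what an XCom-driven "send" task could look like, assuming the upstream export task pushes its Google Cloud Storage file path to XCom. The real build_gcs_to_bq_task implementation is not part of this diff, so the operator choice, bucket name, and source format below are illustrative assumptions, not the repository's actual code.

from airflow.providers.google.cloud.transfers.gcs_to_bigquery import (
    GCSToBigQueryOperator,
)

def sketch_gcs_to_bq_task(dag, export_task_id, project, dataset, table):
    # Pull the exported file's GCS path from the upstream task's XCom at
    # runtime via Jinja templating; the export task is assumed to push it.
    source_object = (
        "{{ task_instance.xcom_pull(task_ids='" + export_task_id + "') }}"
    )
    return GCSToBigQueryOperator(
        task_id=f"send_{table}_to_bq",
        bucket="hypothetical-export-bucket",  # placeholder bucket name
        source_objects=[source_object],
        destination_project_dataset_table=f"{project}.{dataset}.{table}",
        source_format="NEWLINE_DELIMITED_JSON",  # assumed export format
        write_disposition="WRITE_APPEND",
        dag=dag,
    )

Judging by the task names, the "merges the unique entries" step described in the docstring lives downstream in the build_bq_insert_job tasks, which is why each send task in the deleted chain was paired with a dedup task.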

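Reconstructed from the hunk headers (2 additions, 29 deletions), the net effect of this commit on the DAG wiring is that the internal asset chain (delete, send, dedup) is dropped and the export feeds only the public chain:

# Before: asset_export_task fanned out to an internal chain and a public chain.
# After this commit, only the public chain remains:
(
    time_task
    >> write_asset_stats
    >> asset_export_task
    >> delete_old_asset_pub_task
    >> send_assets_to_pub_task
    >> dedup_assets_pub_task
)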