From 7db8a1783b747c4960232c58a3589f41cb25f02e Mon Sep 17 00:00:00 2001 From: Peter Heise <2010@pheise.de> Date: Fri, 4 Mar 2022 09:52:20 +0100 Subject: [PATCH 01/16] Update create_external_table.sql Added table_format option to be used with Delta. --- macros/plugins/snowflake/create_external_table.sql | 1 + 1 file changed, 1 insertion(+) diff --git a/macros/plugins/snowflake/create_external_table.sql b/macros/plugins/snowflake/create_external_table.sql index 721611c2..7bb46291 100644 --- a/macros/plugins/snowflake/create_external_table.sql +++ b/macros/plugins/snowflake/create_external_table.sql @@ -33,4 +33,5 @@ {% if external.pattern -%} pattern = '{{external.pattern}}' {%- endif %} {% if external.integration -%} integration = '{{external.integration}}' {%- endif %} file_format = {{external.file_format}} + {% if external.table_format -%} table_format = '{{external.table_format}}' {%- endif %} {% endmacro %} From cf89403a68125a5bf1f15dd179b752d2cefc1927 Mon Sep 17 00:00:00 2001 From: Peter Heise <2010@pheise.de> Date: Fri, 4 Mar 2022 09:54:50 +0100 Subject: [PATCH 02/16] Added documentation for Delta lake Added documentation for Delta lake --- sample_sources/snowflake.yml | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/sample_sources/snowflake.yml b/sample_sources/snowflake.yml index 598d5073..83d710ea 100644 --- a/sample_sources/snowflake.yml +++ b/sample_sources/snowflake.yml @@ -60,4 +60,11 @@ sources: # # if you do not specify *any* columns for a snowpiped table, dbt will also # include `value`, the JSON blob of all file contents. - \ No newline at end of file + + - name: delta_tbl + description: "External table using Delta files" + external: + location: "@stage" # reference an existing external stage + file_format: "( type = parquet )" # fully specified here, or reference an existing file format + table_format: delta # specify the table format + auto_refresh: false # requires configuring an event notification from Amazon S3 or Azure From ae1f111ab0d954b7bb344d829aec3e033183e815 Mon Sep 17 00:00:00 2001 From: Spencer Carrucciu Date: Tue, 12 Jul 2022 10:27:32 -0400 Subject: [PATCH 03/16] Update bigquery column quoting Update bigquery create external table macro to enable column quoting --- macros/plugins/bigquery/create_external_table.sql | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/macros/plugins/bigquery/create_external_table.sql b/macros/plugins/bigquery/create_external_table.sql index cefa7024..b037d471 100644 --- a/macros/plugins/bigquery/create_external_table.sql +++ b/macros/plugins/bigquery/create_external_table.sql @@ -14,8 +14,9 @@ create or replace external table {{source(source_node.source_name, source_node.name)}} {%- if columns -%}( + {%- set column_quoted = adapter.quote(column.name) if column.quote else column.name %} {% for column in columns %} - {{column.name}} {{column.data_type}} {{- ',' if not loop.last -}} + {{column_quoted}} {{column.data_type}} {{- ',' if not loop.last -}} {%- endfor -%} ) {% endif %} From d0ab017018ea936a517d1471276ad1f0a761a4aa Mon Sep 17 00:00:00 2001 From: jeremyyeo Date: Wed, 24 Aug 2022 09:34:19 +1000 Subject: [PATCH 04/16] add error_integration config --- macros/plugins/snowflake/snowpipe/create_snowpipe.sql | 1 + 1 file changed, 1 insertion(+) diff --git a/macros/plugins/snowflake/snowpipe/create_snowpipe.sql b/macros/plugins/snowflake/snowpipe/create_snowpipe.sql index 88385345..a9ba44eb 100644 --- a/macros/plugins/snowflake/snowpipe/create_snowpipe.sql +++ 
b/macros/plugins/snowflake/snowpipe/create_snowpipe.sql @@ -8,6 +8,7 @@ {% if snowpipe.auto_ingest -%} auto_ingest = {{snowpipe.auto_ingest}} {%- endif %} {% if snowpipe.aws_sns_topic -%} aws_sns_topic = '{{snowpipe.aws_sns_topic}}' {%- endif %} {% if snowpipe.integration -%} integration = '{{snowpipe.integration}}' {%- endif %} + {% if snowpipe.error_integration -%} error_integration = '{{snowpipe.error_integration}}' {%- endif %} as {{ dbt_external_tables.snowflake_get_copy_sql(source_node) }} {% endmacro %} From 7e1f0d0b62014b8726433aae4bebdb79d16134c7 Mon Sep 17 00:00:00 2001 From: jeremyyeo Date: Wed, 14 Sep 2022 21:06:40 +1200 Subject: [PATCH 05/16] pin pyodbc temporarily --- run_test.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/run_test.sh b/run_test.sh index f6be5a64..f7e795ab 100755 --- a/run_test.sh +++ b/run_test.sh @@ -10,6 +10,7 @@ if [[ ! -f $VENV ]]; then then echo "Installing dbt-spark" pip install dbt-spark[ODBC] --upgrade --pre + pip install pyodbc==4.0.32 # See https://github.com/dbt-labs/dbt-external-tables/issues/156 elif [ $1 == 'azuresql' ] then echo "Installing dbt-sqlserver" From 46c57102b20d7d851cb407a9b6d5386face218ba Mon Sep 17 00:00:00 2001 From: jeremyyeo Date: Wed, 14 Sep 2022 21:10:49 +1200 Subject: [PATCH 06/16] pin pyodbc temporarily --- run_test.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/run_test.sh b/run_test.sh index f6be5a64..0e3a940c 100755 --- a/run_test.sh +++ b/run_test.sh @@ -10,6 +10,7 @@ if [[ ! -f $VENV ]]; then then echo "Installing dbt-spark" pip install dbt-spark[ODBC] --upgrade --pre + pip install pyodbc==4.0.32 # See https://github.com/dbt-labs/dbt-external-tables/issues/156 elif [ $1 == 'azuresql' ] then echo "Installing dbt-sqlserver" From 12bd4bd68cbeff5e62ecfd2e83a377ac7371e735 Mon Sep 17 00:00:00 2001 From: jeremyyeo Date: Wed, 21 Sep 2022 20:38:32 +1200 Subject: [PATCH 07/16] fix run_test.sh --- run_test.sh | 0 1 file changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 run_test.sh diff --git a/run_test.sh b/run_test.sh old mode 100644 new mode 100755 From 410b21900acd0689a14a28bd5b8fcb8af826a78f Mon Sep 17 00:00:00 2001 From: Jeremy Yeo Date: Wed, 21 Sep 2022 21:31:48 +1200 Subject: [PATCH 08/16] Add self reference for macro (#163) Co-authored-by: Lasse Benninga --- .../plugins/spark/get_external_build_plan.sql | 4 ++-- .../spark/helpers/recover_partitions.sql | 21 ++++++++++++++++--- 2 files changed, 20 insertions(+), 5 deletions(-) diff --git a/macros/plugins/spark/get_external_build_plan.sql b/macros/plugins/spark/get_external_build_plan.sql index 47ab3247..38afdac6 100644 --- a/macros/plugins/spark/get_external_build_plan.sql +++ b/macros/plugins/spark/get_external_build_plan.sql @@ -19,8 +19,8 @@ {% set build_plan = build_plan + dbt_external_tables.refresh_external_table(source_node) %} {% endif %} - {% set recover_partitions = spark__recover_partitions(source_node) %} - {% if recover_partitions|length > 0 %} + {% set recover_partitions = dbt_external_tables.recover_partitions(source_node) %} + {% if recover_partitions %} {% set build_plan = build_plan + [ recover_partitions ] %} diff --git a/macros/plugins/spark/helpers/recover_partitions.sql b/macros/plugins/spark/helpers/recover_partitions.sql index 2d20212b..bdc4b227 100644 --- a/macros/plugins/spark/helpers/recover_partitions.sql +++ b/macros/plugins/spark/helpers/recover_partitions.sql @@ -1,12 +1,27 @@ {% macro spark__recover_partitions(source_node) %} {# 
https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-alter-table.html #} - {% set ddl %} {%- if source_node.external.partitions and source_node.external.using and source_node.external.using|lower != 'delta' -%} - ALTER TABLE {{ source(source_node.source_name, source_node.name) }} RECOVER PARTITIONS + {% set ddl %} + ALTER TABLE {{ source(source_node.source_name, source_node.name) }} RECOVER PARTITIONS + {% endset %} + {%- else -%} + {% set ddl = none %} {%- endif -%} - {% endset %} {{return(ddl)}} {% endmacro %} + +{% macro recover_partitions(source_node) %} + {{ return(adapter.dispatch('recover_partitions', 'dbt_external_tables')(source_node)) }} +{% endmacro %} + +{% macro default__recover_partitions(source_node) %} + /*{# + We're dispatching this macro so that users can override it if required on other adapters + but this will work for spark/databricks. + #}*/ + + {{ exceptions.raise_not_implemented('recover_partitions macro not implemented for adapter ' + adapter.type()) }} +{% endmacro %} From 2a407bf4e99dbd3f479f4d8856312edcef01a675 Mon Sep 17 00:00:00 2001 From: Jeremy Yeo Date: Thu, 22 Sep 2022 11:05:18 +1200 Subject: [PATCH 09/16] Fix `column_quoted` set outside the columns loop (#166) --- .../models/plugins/bigquery/bigquery_external.yml | 4 ++-- macros/plugins/bigquery/create_external_table.sql | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/integration_tests/models/plugins/bigquery/bigquery_external.yml b/integration_tests/models/plugins/bigquery/bigquery_external.yml index 92e1e39a..3b81230b 100644 --- a/integration_tests/models/plugins/bigquery/bigquery_external.yml +++ b/integration_tests/models/plugins/bigquery/bigquery_external.yml @@ -37,7 +37,7 @@ sources: options: format: csv skip_leading_rows: 1 - hive_partition_uri_prefix: "'gs://dbt-external-tables-testing/csv'" + hive_partition_uri_prefix: 'gs://dbt-external-tables-testing/csv' partitions: &parts-of-the-people - name: section data_type: string @@ -50,7 +50,7 @@ sources: options: format: csv skip_leading_rows: 1 - hive_partition_uri_prefix: "'gs://dbt-external-tables-testing/csv'" + hive_partition_uri_prefix: 'gs://dbt-external-tables-testing/csv' tests: *equal-to-the-people - name: people_csv_override_uris diff --git a/macros/plugins/bigquery/create_external_table.sql b/macros/plugins/bigquery/create_external_table.sql index b037d471..af0970cf 100644 --- a/macros/plugins/bigquery/create_external_table.sql +++ b/macros/plugins/bigquery/create_external_table.sql @@ -14,8 +14,8 @@ create or replace external table {{source(source_node.source_name, source_node.name)}} {%- if columns -%}( - {%- set column_quoted = adapter.quote(column.name) if column.quote else column.name %} {% for column in columns %} + {%- set column_quoted = adapter.quote(column.name) if column.quote else column.name %} {{column_quoted}} {{column.data_type}} {{- ',' if not loop.last -}} {%- endfor -%} ) From e38cc50310de1a739809a2b48f7fa2790699e857 Mon Sep 17 00:00:00 2001 From: Joel Labes Date: Fri, 7 Oct 2022 17:18:54 +1300 Subject: [PATCH 10/16] Update README.md --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index fbe4357b..51e34d31 100644 --- a/README.md +++ b/README.md @@ -117,7 +117,7 @@ execute the appropriate `create`, `refresh`, and/or `drop` commands: If you encounter issues using this package or have questions, please check the [open issues](https://github.com/dbt-labs/dbt-external-tables/issues), as there's a chance it's a known limitation or work 
in progress. If not, you can: - open a new issue to report a bug or suggest an enhancement -- post a technical question to [StackOverflow](https://stackoverflow.com/questions/tagged/dbt) +- post a technical question to [the Community Forum](https://discourse.getdbt.com/c/help/19) - post a conceptual question to the relevant database channel (#db-redshift, #dbt-snowflake, etc) in the [dbt Slack community](https://community.getdbt.com/) -Additional contributions to this package are very welcome! Please create issues or open PRs against `master`. Check out [this post](https://discourse.getdbt.com/t/contributing-to-an-external-dbt-package/657) on the best workflow for contributing to a package. \ No newline at end of file +Additional contributions to this package are very welcome! Please create issues or open PRs against `master`. Check out [this post](https://discourse.getdbt.com/t/contributing-to-an-external-dbt-package/657) on the best workflow for contributing to a package. From 80d89acd0564055db1d42fb38fdbca1f8d9ce6f8 Mon Sep 17 00:00:00 2001 From: pgoslatara Date: Fri, 25 Nov 2022 03:39:37 +0100 Subject: [PATCH 11/16] Correcting handling partitions in Spark and adding sample source (#161) --- macros/plugins/spark/create_external_table.sql | 15 ++++++++++++--- sample_sources/spark.yml | 7 +++++++ 2 files changed, 19 insertions(+), 3 deletions(-) diff --git a/macros/plugins/spark/create_external_table.sql b/macros/plugins/spark/create_external_table.sql index 4232f474..0fc8c47e 100644 --- a/macros/plugins/spark/create_external_table.sql +++ b/macros/plugins/spark/create_external_table.sql @@ -5,10 +5,19 @@ {%- set partitions = external.partitions -%} {%- set options = external.options -%} + {%- set columns_and_partitions = columns | list -%} + {%- if partitions -%} + {%- for i in partitions -%} + {%- if i.name not in columns_and_partitions | list | map(attribute='name') -%} + {%- do columns_and_partitions.append(i) -%} + {%- endif -%} + {%- endfor -%} + {%- endif -%} + {# https://spark.apache.org/docs/latest/sql-data-sources-hive-tables.html #} create table {{source(source_node.source_name, source_node.name)}} - {%- if columns|length > 0 %} ( - {% for column in columns %} + {%- if columns | length > 0 %} ( + {% for column in columns_and_partitions %} {{column.name}} {{column.data_type}} {{- ',' if not loop.last -}} {% endfor %} @@ -21,7 +30,7 @@ ) {%- endif %} {% if partitions -%} partitioned by ( {%- for partition in partitions -%} - {{partition.name}} {{partition.data_type}}{{', ' if not loop.last}} + {{partition.name}}{{', ' if not loop.last}} {%- endfor -%} ) {%- endif %} {% if external.row_format -%} row format {{external.row_format}} {%- endif %} diff --git a/sample_sources/spark.yml b/sample_sources/spark.yml index 658e198c..6106cb3b 100644 --- a/sample_sources/spark.yml +++ b/sample_sources/spark.yml @@ -12,6 +12,13 @@ sources: sep: '|' header: 'true' timestampFormat: 'yyyy-MM-dd HH:mm' + partitions: + - name: year + data_type: int + - name: month + data_type: int + - name: day + data_type: int columns: - name: app_id From b7922b732df1fdbcb7452e3192eab30fdb1d52b0 Mon Sep 17 00:00:00 2001 From: dave-connors-3 <73915542+dave-connors-3@users.noreply.github.com> Date: Fri, 6 Jan 2023 11:42:41 -0600 Subject: [PATCH 12/16] add aws context (#179) --- .circleci/config.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index ede02ac6..996efd49 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -110,7 +110,8 @@ 
workflows: - integration-redshift - integration-snowflake - integration-bigquery - - integration-databricks + - integration-databricks: + context: aws-credentials #- integration-synapse #- integration-azuresql: # requires: From 9eacbd41489cfbaf5f8b4d6644147da4aa3fb0a8 Mon Sep 17 00:00:00 2001 From: Doug Beatty <44704949+dbeatty10@users.noreply.github.com> Date: Mon, 9 Jan 2023 16:27:04 -0700 Subject: [PATCH 13/16] Use CircleCI contexts for environment variables (#180) * Use CircleCI contexts for environment variables * Update config.yml * Use project-specific environment variables for databricks rather than context * Temporarily disable CI for Databricks --- .circleci/config.yml | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 996efd49..96f354be 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -107,12 +107,19 @@ workflows: version: 2 test-all: jobs: - - integration-redshift - - integration-snowflake - - integration-bigquery - - integration-databricks: - context: aws-credentials - #- integration-synapse + - integration-redshift: + context: profile-redshift + - integration-snowflake: + context: profile-snowflake + - integration-bigquery: + context: profile-bigquery + #- integration-databricks: + # context: + # - aws-credentials + # - profile-databricks + #- integration-synapse: + # context: profile-synapse #- integration-azuresql: + # context: profile-azure # requires: # - integration-synapse From ae10c1e5ed4e52bfc9e79d509b9ccf2f2c0b9fb9 Mon Sep 17 00:00:00 2001 From: Jeremy Cohen Date: Mon, 16 Jan 2023 13:14:06 +0100 Subject: [PATCH 14/16] Passing the torch (#164) --- .github/CODEOWNERS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 0082d7cf..ff8cbb24 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1 +1 @@ -* @jtcohen6 \ No newline at end of file +* @jeremyyeo From f62a9c409569d02e4d8f89d2f7b79e104929fbd4 Mon Sep 17 00:00:00 2001 From: Jeremy Yeo Date: Tue, 31 Jan 2023 22:46:07 +1300 Subject: [PATCH 15/16] Fix databricks tests (#183) Co-authored-by: Doug Beatty <44704949+dbeatty10@users.noreply.github.com> --- .circleci/config.yml | 15 ++++++--------- run_test.sh | 1 - 2 files changed, 6 insertions(+), 10 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 96f354be..9d179a4c 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,11 +1,9 @@ - version: 2.1 orbs: azure-cli: circleci/azure-cli@1.1.0 jobs: - integration-redshift: docker: - image: cimg/python:3.9.9 @@ -16,7 +14,7 @@ jobs: command: ./run_test.sh redshift - store_artifacts: path: ./logs - + integration-snowflake: docker: - image: cimg/python:3.9.9 @@ -27,7 +25,7 @@ jobs: command: ./run_test.sh snowflake - store_artifacts: path: ./logs - + integration-bigquery: environment: BIGQUERY_SERVICE_KEY_PATH: "/home/circleci/bigquery-service-key.json" @@ -102,7 +100,6 @@ jobs: - store_artifacts: path: ./logs - workflows: version: 2 test-all: @@ -113,10 +110,10 @@ workflows: context: profile-snowflake - integration-bigquery: context: profile-bigquery - #- integration-databricks: - # context: - # - aws-credentials - # - profile-databricks + - integration-databricks: + context: + - aws-credentials + - profile-databricks #- integration-synapse: # context: profile-synapse #- integration-azuresql: diff --git a/run_test.sh b/run_test.sh index f7e795ab..f6be5a64 100755 --- a/run_test.sh +++ b/run_test.sh @@ -10,7 +10,6 @@ if [[ ! 
-f $VENV ]]; then then echo "Installing dbt-spark" pip install dbt-spark[ODBC] --upgrade --pre - pip install pyodbc==4.0.32 # See https://github.com/dbt-labs/dbt-external-tables/issues/156 elif [ $1 == 'azuresql' ] then echo "Installing dbt-sqlserver" From 34c1cc4bbcf28ccb0243807441bf642e801ad158 Mon Sep 17 00:00:00 2001 From: guillesd <74136033+guillesd@users.noreply.github.com> Date: Fri, 24 Mar 2023 10:00:27 +0100 Subject: [PATCH 16/16] Create schema for external tables when it is not there. (#167) --- macros/common/create_external_schema.sql | 11 +++++++++++ macros/plugins/bigquery/get_external_build_plan.sql | 5 ++++- .../plugins/snowflake/get_external_build_plan.sql | 6 +++++- macros/plugins/spark/get_external_build_plan.sql | 1 + macros/plugins/sqlserver/create_external_schema.sql | 13 +++++++++++++ .../plugins/sqlserver/get_external_build_plan.sql | 1 + 6 files changed, 35 insertions(+), 2 deletions(-) create mode 100644 macros/common/create_external_schema.sql create mode 100644 macros/plugins/sqlserver/create_external_schema.sql diff --git a/macros/common/create_external_schema.sql b/macros/common/create_external_schema.sql new file mode 100644 index 00000000..c96cbcda --- /dev/null +++ b/macros/common/create_external_schema.sql @@ -0,0 +1,11 @@ +{% macro create_external_schema(source_node) %} + {{ adapter.dispatch('create_external_schema', 'dbt_external_tables')(source_node) }} +{% endmacro %} + +{% macro default__create_external_schema(source_node) %} + {% set ddl %} + create schema if not exists {{ source_node.schema }} + {% endset %} + + {{return(ddl)}} +{% endmacro %} diff --git a/macros/plugins/bigquery/get_external_build_plan.sql b/macros/plugins/bigquery/get_external_build_plan.sql index fe7d7838..f90c1bfa 100644 --- a/macros/plugins/bigquery/get_external_build_plan.sql +++ b/macros/plugins/bigquery/get_external_build_plan.sql @@ -11,7 +11,10 @@ {% set create_or_replace = (old_relation is none or var('ext_full_refresh', false)) %} {% if create_or_replace %} - {% set build_plan = build_plan + [dbt_external_tables.create_external_table(source_node)] %} + {% set build_plan = build_plan + [ + dbt_external_tables.create_external_schema(source_node), + dbt_external_tables.create_external_table(source_node) + ] %} {% else %} {% set build_plan = build_plan + dbt_external_tables.refresh_external_table(source_node) %} {% endif %} diff --git a/macros/plugins/snowflake/get_external_build_plan.sql b/macros/plugins/snowflake/get_external_build_plan.sql index 0f73a0b2..807cbb5d 100644 --- a/macros/plugins/snowflake/get_external_build_plan.sql +++ b/macros/plugins/snowflake/get_external_build_plan.sql @@ -14,6 +14,7 @@ {% if create_or_replace %} {% set build_plan = build_plan + [ + dbt_external_tables.create_external_schema(source_node), dbt_external_tables.snowflake_create_empty_table(source_node), dbt_external_tables.snowflake_get_copy_sql(source_node, explicit_transaction=true), dbt_external_tables.snowflake_create_snowpipe(source_node) @@ -25,7 +26,10 @@ {% else %} {% if create_or_replace %} - {% set build_plan = build_plan + [dbt_external_tables.create_external_table(source_node)] %} + {% set build_plan = build_plan + [ + dbt_external_tables.create_external_schema(source_node), + dbt_external_tables.create_external_table(source_node) + ] %} {% else %} {% set build_plan = build_plan + dbt_external_tables.refresh_external_table(source_node) %} {% endif %} diff --git a/macros/plugins/spark/get_external_build_plan.sql b/macros/plugins/spark/get_external_build_plan.sql index 
38afdac6..cab1ca29 100644 --- a/macros/plugins/spark/get_external_build_plan.sql +++ b/macros/plugins/spark/get_external_build_plan.sql @@ -12,6 +12,7 @@ {% if create_or_replace %} {% set build_plan = build_plan + [ + dbt_external_tables.create_external_schema(source_node), dbt_external_tables.dropif(source_node), dbt_external_tables.create_external_table(source_node) ] %} diff --git a/macros/plugins/sqlserver/create_external_schema.sql b/macros/plugins/sqlserver/create_external_schema.sql new file mode 100644 index 00000000..c1ef48be --- /dev/null +++ b/macros/plugins/sqlserver/create_external_schema.sql @@ -0,0 +1,13 @@ +{% macro sqlserver__create_external_schema(source_node) %} + {# https://learn.microsoft.com/en-us/sql/t-sql/statements/create-schema-transact-sql?view=sql-server-ver16 #} + + {% set ddl %} + IF NOT EXISTS (SELECT * FROM sys.schemas WHERE name = '{{ source_node.schema }}') + BEGIN + EXEC('CREATE SCHEMA [{{ source_node.schema }}]') + END + {% endset %} + + {{return(ddl)}} + +{% endmacro %} diff --git a/macros/plugins/sqlserver/get_external_build_plan.sql b/macros/plugins/sqlserver/get_external_build_plan.sql index 4e974100..7a161a0d 100644 --- a/macros/plugins/sqlserver/get_external_build_plan.sql +++ b/macros/plugins/sqlserver/get_external_build_plan.sql @@ -12,6 +12,7 @@ {% if create_or_replace %} {% set build_plan = build_plan + [ + dbt_external_tables.create_external_schema(source_node), dbt_external_tables.dropif(source_node), dbt_external_tables.create_external_table(source_node) ] %}
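
A hedged usage sketch for the new error_integration option, which the series adds to create_snowpipe.sql without a matching entry in sample_sources/snowflake.yml. The names here are illustrative (`snowpiped_tbl`, the `@stage` reference, and `my_notification_int`), and the option assumes a notification integration already exists in Snowflake:

      - name: snowpiped_tbl
        description: "Snowpiped table that routes load errors to a notification integration"
        external:
          location: '@stage'  # reference an existing external stage
          file_format: '( type = json )'
          snowpipe:
            auto_ingest: true
            error_integration: my_notification_int  # hypothetical; must name an existing notification integration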
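
Because `create_external_schema` is dispatched through the `dbt_external_tables` namespace, a consuming project can shadow the default schema-creation DDL rather than forking the package, as the comment in default__recover_partitions suggests for other macros. A minimal sketch, assuming a root project named `my_project` whose dbt_project.yml sets a dispatch config for the `dbt_external_tables` macro_namespace with search_order ['my_project', 'dbt_external_tables']; the database-qualified DDL below is an illustrative choice, not part of the package:

{# macros/create_external_schema.sql in the consuming project (hypothetical override) #}
{% macro default__create_external_schema(source_node) %}
    {# Illustrative variant: qualify the schema with its database #}
    {% set ddl %}
        create schema if not exists {{ source_node.database }}.{{ source_node.schema }}
    {% endset %}

    {{ return(ddl) }}
{% endmacro %}

With that in place, `dbt run-operation stage_external_sources` resolves the project's implementation before the package's, because the project namespace is searched first.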