Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Added sqlserver compatibility including CI #248

Open
wants to merge 2 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
73 changes: 53 additions & 20 deletions .github/workflows/ci_test_package.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,26 +6,25 @@ on:
# will run in the context of the target branch of a PR
pull_request_target:

env:
# These are configured in GitHub secrets
DBT_PROFILES_DIR: /home/runner/work/${{ github.event.repository.name }}/${{ github.event.repository.name }}/integration_test_project
GITHUB_SHA_OVERRIDE: ${{ github.event.pull_request.head.sha }} # We need the commit hash of the pull request branch's head, the GITHUB_SHA env var is always the base branch in a pull_request_target trigger
DBT_ENV_SECRET_SNOWFLAKE_TEST_ACCOUNT: ${{ secrets.SNOWFLAKE_TEST_ACCOUNT }}
DBT_ENV_SECRET_SNOWFLAKE_TEST_USER: ${{ secrets.SNOWFLAKE_TEST_USER }}
DBT_ENV_SECRET_SNOWFLAKE_TEST_PASSWORD: ${{ secrets.SNOWFLAKE_TEST_PASSWORD }}
DBT_ENV_SECRET_SNOWFLAKE_TEST_ROLE: ${{ secrets.SNOWFLAKE_TEST_ROLE }}
DBT_ENV_SECRET_SNOWFLAKE_TEST_DATABASE: ${{ secrets.SNOWFLAKE_TEST_DATABASE }}
DBT_ENV_SECRET_SNOWFLAKE_TEST_WAREHOUSE: ${{ secrets.SNOWFLAKE_TEST_WAREHOUSE }}
DBT_ENV_SECRET_DATABRICKS_HOST: ${{ secrets.DATABRICKS_HOST }}
DBT_ENV_SECRET_DATABRICKS_HTTP_PATH: ${{ secrets.DATABRICKS_HTTP_PATH }}
DBT_ENV_SECRET_DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
DBT_ENV_SECRET_GCP_PROJECT: ${{ secrets.GCP_PROJECT }}
# Env vars to test invocations model
DBT_CLOUD_PROJECT_ID: 123
DBT_CLOUD_JOB_ID: ABC
DBT_CLOUD_RUN_REASON: "String with 'quotes' !"
TEST_ENV_VAR_1: TEST_VALUE
DBT_ENV_CUSTOM_ENV_FAVOURITE_DBT_PACKAGE: dbt_artifacts
# These are configured in GitHub secrets
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Don't these need to be under the env key?

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Right you are, I've updated my branch with this change.

DBT_PROFILES_DIR: /home/runner/work/${{ github.event.repository.name }}/${{ github.event.repository.name }}/integration_test_project
GITHUB_SHA_OVERRIDE: ${{ github.event.pull_request.head.sha }} # We need the commit hash of the pull request branch's head, the GITHUB_SHA env var is always the base branch in a pull_request_target trigger
DBT_ENV_SECRET_SNOWFLAKE_TEST_ACCOUNT: ${{ secrets.SNOWFLAKE_TEST_ACCOUNT }}
DBT_ENV_SECRET_SNOWFLAKE_TEST_USER: ${{ secrets.SNOWFLAKE_TEST_USER }}
DBT_ENV_SECRET_SNOWFLAKE_TEST_PASSWORD: ${{ secrets.SNOWFLAKE_TEST_PASSWORD }}
DBT_ENV_SECRET_SNOWFLAKE_TEST_ROLE: ${{ secrets.SNOWFLAKE_TEST_ROLE }}
DBT_ENV_SECRET_SNOWFLAKE_TEST_DATABASE: ${{ secrets.SNOWFLAKE_TEST_DATABASE }}
DBT_ENV_SECRET_SNOWFLAKE_TEST_WAREHOUSE: ${{ secrets.SNOWFLAKE_TEST_WAREHOUSE }}
DBT_ENV_SECRET_DATABRICKS_HOST: ${{ secrets.DATABRICKS_HOST }}
DBT_ENV_SECRET_DATABRICKS_HTTP_PATH: ${{ secrets.DATABRICKS_HTTP_PATH }}
DBT_ENV_SECRET_DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
DBT_ENV_SECRET_GCP_PROJECT: ${{ secrets.GCP_PROJECT }}
# Env vars to test invocations model
DBT_CLOUD_PROJECT_ID: 123
DBT_CLOUD_JOB_ID: ABC
DBT_CLOUD_RUN_REASON: "String with 'quotes' !"
TEST_ENV_VAR_1: TEST_VALUE
DBT_ENV_CUSTOM_ENV_FAVOURITE_DBT_PACKAGE: dbt_artifacts


jobs:
Expand Down Expand Up @@ -104,6 +103,40 @@ jobs:
- name: Run BigQuery Tests
run: tox -e integration_bigquery

integration-sqlserver:
  runs-on: ubuntu-latest

  steps:
    - name: Checkout
      uses: actions/checkout@v2
      with:
        ref: ${{ github.event.pull_request.head.sha }}  # Check out the code of the PR

    - uses: actions/setup-python@v2
      with:
        python-version: '3.8.x'
        architecture: 'x64'

    # --name lets the readiness check below exec into the container.
    # Password must stay in sync with profiles.yml's sqlserver target.
    - name: Install SQL Server
      run: docker run --name sqlserver -e "ACCEPT_EULA=Y" -e "MSSQL_SA_PASSWORD=1StrongPwd!!" -p 1433:1433 -d mcr.microsoft.com/mssql/server:2019-latest

    - name: Install Microsoft ODBC
      run: sudo ACCEPT_EULA=Y apt-get install msodbcsql18 -y

    - name: Install tox
      run: python3 -m pip install tox

    # SQL Server needs several seconds after container start before it
    # accepts connections; running tox immediately is a race that can fail
    # the first test run. Poll with sqlcmd (bundled in the server image)
    # for up to ~60s before proceeding.
    - name: Wait for SQL Server to become ready
      run: |
        for _ in $(seq 1 30); do
          if docker exec sqlserver /opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P '1StrongPwd!!' -Q 'SELECT 1' > /dev/null 2>&1; then
            exit 0
          fi
          sleep 2
        done
        echo 'SQL Server did not become ready in time' >&2
        exit 1

    - name: Run SQL Server Tests
      run: tox -e integration_sqlserver

    - name: Upload logfile
      uses: actions/upload-artifact@v3
      if: always()
      with:
        name: dbt.log
        path: /home/runner/work/dbt_artifacts/dbt_artifacts/integration_test_project/logs/

sqlfluff-lint-models:
name: Lint dbt models using SQLFluff
runs-on: ubuntu-latest
Expand Down
31 changes: 31 additions & 0 deletions .github/workflows/main_test_package.yml
Original file line number Diff line number Diff line change
Expand Up @@ -91,6 +91,37 @@ jobs:
- name: Run BigQuery Tests
run: tox -e integration_bigquery

integration-sqlserver:
  runs-on: ubuntu-latest

  steps:
    - name: Checkout
      uses: actions/checkout@v2

    - uses: actions/setup-python@v2
      with:
        python-version: '3.8.x'
        architecture: 'x64'

    # --name lets the readiness check below exec into the container.
    # Password must stay in sync with profiles.yml's sqlserver target.
    - name: Install SQL Server
      run: docker run --name sqlserver -e "ACCEPT_EULA=Y" -e "MSSQL_SA_PASSWORD=1StrongPwd!!" -p 1433:1433 -d mcr.microsoft.com/mssql/server:2019-latest

    - name: Install Microsoft ODBC
      run: sudo ACCEPT_EULA=Y apt-get install msodbcsql18 -y

    - name: Install tox
      run: python3 -m pip install tox

    # SQL Server needs several seconds after container start before it
    # accepts connections; running tox immediately is a race that can fail
    # the first test run. Poll with sqlcmd (bundled in the server image)
    # for up to ~60s before proceeding.
    - name: Wait for SQL Server to become ready
      run: |
        for _ in $(seq 1 30); do
          if docker exec sqlserver /opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P '1StrongPwd!!' -Q 'SELECT 1' > /dev/null 2>&1; then
            exit 0
          fi
          sleep 2
        done
        echo 'SQL Server did not become ready in time' >&2
        exit 1

    - name: Run SQL Server Tests
      run: tox -e integration_sqlserver

    - name: Upload logfile
      uses: actions/upload-artifact@v3
      if: always()
      with:
        name: dbt.log
        path: /home/runner/work/dbt_artifacts/dbt_artifacts/integration_test_project/logs/

sqlfluff-lint-models:
name: Lint dbt models using SQLFluff
runs-on: ubuntu-latest
Expand Down
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ The package currently supports
- Spark :white_check_mark:
- Snowflake adapters :white_check_mark:
- Google BigQuery :white_check_mark:
- Microsoft SQL Server :white_check_mark:

Models included:

Expand Down
4 changes: 2 additions & 2 deletions integration_test_project/dbt_project.yml
Original file line number Diff line number Diff line change
Expand Up @@ -30,8 +30,8 @@ vars:

models:
+persist_docs:
relation: true
columns: true
relation: "{{ false if target.name == 'sqlserver' else true }}"
columns: "{{ false if target.name == 'sqlserver' else true }}"
seeds:
+quote_columns: false

Expand Down
6 changes: 3 additions & 3 deletions integration_test_project/models/tests_and_exposures.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10,10 +10,10 @@ models:
- unique

exposures:
- name: "ceo's imaginary dashboard"
- name: "ceos_imaginary_dashboard"
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

What's the thinking here? :)

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

dbt 1.3 warned that spaces in exposure names will not be permitted in a future version. The spaces were also failing the CI tests on SQL Server, so this change killed two birds with one stone.

type: dashboard
maturity: high
description: "ceo's favourite dashboard"
description: "ceos favourite dashboard"
url: https://bi.tool/dashboards/1

depends_on:
Expand All @@ -23,7 +23,7 @@ exposures:
name: Claire from Data
email: [email protected]

- name: "CIO dashboard"
- name: "CIO_dashboard"
type: dashboard
maturity: high
description: '{{ doc("clickstream") }}'
Expand Down
12 changes: 12 additions & 0 deletions integration_test_project/profiles.yml
Original file line number Diff line number Diff line change
Expand Up @@ -43,3 +43,15 @@ dbt_artifacts:
timeout_seconds: 300
priority: interactive
retries: 1
# Local SQL Server target used by the integration_sqlserver tox environment.
# The server is the dockerised mcr.microsoft.com/mssql/server container
# started by the CI workflows, so the credentials below are throwaway test
# values for a local container, not secrets.
sqlserver:
  type: sqlserver
  # Per-commit schemas (as used by the other targets) don't fit SQL Server's
  # identifier rules here, so a fixed test schema is used instead:
  # schema: dbt_artifacts_test_commit_{{ env_var('GITHUB_SHA_OVERRIDE', '') if env_var('GITHUB_SHA_OVERRIDE', '') else env_var('GITHUB_SHA') }}
  schema: dbttest
  driver: 'ODBC Driver 18 for SQL Server'  # (The ODBC Driver installed on your system)
  server: localhost
  port: 1433
  database: master
  user: sa
  password: '1StrongPwd!!'  # must match MSSQL_SA_PASSWORD in the CI workflows
  encrypt: true             # canonical lowercase booleans (yamllint `truthy`)
  trust_cert: true          # local container uses a self-signed certificate
4 changes: 4 additions & 0 deletions integration_test_project/tests/singular_test.sql
Original file line number Diff line number Diff line change
@@ -1 +1,5 @@
-- Singular test that must always pass: the `where 1 = 2` predicate
-- guarantees zero rows, so dbt records zero failures on every adapter.
{% if target.name == 'sqlserver' %}
-- T-SQL branch: uses a VALUES-based derived table with an explicit alias
-- (`as tab(col)`) — presumably because SQL Server rejects the unaliased
-- `(select 2)` form used below; confirm against the CI run if changing.
select 1 as failures from (values(2)) as tab(col) where 1 = 2
{% else %}
select 1 as failures from (select 2) where 1 = 2
{% endif %}
4 changes: 4 additions & 0 deletions macros/column_identifier.sql
Original file line number Diff line number Diff line change
Expand Up @@ -13,3 +13,7 @@
{% macro spark__column_identifier(column_index) -%}
col{{ column_index }}
{%- endmacro %}

{# SQL Server implementation of the column_identifier dispatch macro:
   positional columns are addressed as col1, col2, ... — the same output
   as the spark__ variant defined above. #}
{% macro sqlserver__column_identifier(column_index) -%}
col{{ column_index }}
{%- endmacro %}
9 changes: 9 additions & 0 deletions macros/insert_into_metadata_table.sql
Original file line number Diff line number Diff line change
Expand Up @@ -34,5 +34,14 @@

{%- endmacro %}

{# SQL Server dispatch target for inserting pre-rendered rows (`content`)
   into a dbt_artifacts metadata table.
   NOTE(review): `database_name` is accepted but not used — the insert is
   schema-qualified only, so it lands in whatever database the connection
   is open against (profiles.yml `database:`). Verify this matches the
   other adapters' qualification before relying on cross-database writes. #}
{% macro sqlserver__insert_into_metadata_table(database_name, schema_name, table_name, content) -%}
{% set insert_into_table_query %}
insert into {{ schema_name }}.{{ table_name }}
{{ content }}
{% endset %}

{% do run_query(insert_into_table_query) %}
{%- endmacro %}

{% macro default__insert_into_metadata_table(database_name, schema_name, table_name, content) -%}
{%- endmacro %}
Loading