chore: Merge workflows, test on all available targets #400

Merged (4 commits) on Dec 12, 2023
.github/workflows/transformations_aws_asset_inventory_free.yml (126 additions, 0 deletions)
@@ -0,0 +1,126 @@
name: "Test AWS Asset Inventory Free Policies"

on:
pull_request:
paths:
- "transformations/aws/asset-inventory-free/**"
- ".github/workflows/transformations_aws_asset_inventory_free.yml"
- "transformations/aws/macros/**"
- "transformations/aws/models/**"
- "transformations/macros/**"
push:
branches:
- main
paths:
- "transformations/aws/asset-inventory-free/**"
- ".github/workflows/transformations_aws_asset_inventory_free.yml"
- "transformations/aws/macros/**"
- "transformations/aws/models/**"
- "transformations/macros/**"

env:
SNOW_USER: ${{ secrets.SNOW_USER }}
SNOW_PASSWORD: ${{ secrets.SNOW_PASSWORD }}
# DBT assumes the account is in the form of <account>.<region>
SNOW_ACCOUNT: "${{ secrets.SNOW_ACCOUNT }}.${{ secrets.SNOW_REGION }}"
SNOW_WAREHOUSE: ${{ secrets.SNOW_WAREHOUSE }}
SNOW_DATABASE: ${{ secrets.SNOW_DATABASE }}
SNOW_SCHEMA: ${{ secrets.SNOW_SCHEMA }}
SNOW_REGION: ${{ secrets.SNOW_REGION }}

jobs:
prepare:
runs-on: ubuntu-latest
outputs:
transformation_dir: ${{ fromJson(steps.set-result.outputs.result).transformation_dir }}
postgres: ${{ fromJson(steps.set-result.outputs.result).postgres }}
snowflake: ${{ fromJson(steps.set-result.outputs.result).snowflake }}
bigquery: ${{ fromJson(steps.set-result.outputs.result).bigquery }}
steps:
- name: Checkout
uses: actions/checkout@v4
- uses: actions/github-script@v7
id: set-result
env:
TRANSFORMATION_DIR: transformations/aws/asset-inventory-free
with:
script: |
const fs = require('fs/promises');
const { TRANSFORMATION_DIR: transformation_dir } = process.env;
const [postgres, snowflake, bigquery] = await Promise.all([
fs.access(`${transformation_dir}/tests/postgres.yml`, fs.constants.F_OK).then(() => true).catch(() => false),
fs.access(`${transformation_dir}/tests/snowflake.yml`, fs.constants.F_OK).then(() => true).catch(() => false),
fs.access(`${transformation_dir}/tests/bigquery.yml`, fs.constants.F_OK).then(() => true).catch(() => false),
]);
console.log(JSON.stringify({ transformation_dir, postgres, snowflake, bigquery }));
return {
transformation_dir,
postgres,
snowflake,
bigquery,
};
transformations-aws-asset-inventory-free:
permissions:
id-token: 'write'
contents: 'read'
name: ${{ needs.prepare.outputs.transformation_dir }}
needs: prepare
timeout-minutes: 30
runs-on: ubuntu-latest
defaults:
run:
working-directory: ${{ needs.prepare.outputs.transformation_dir }}
services:
postgres:
image: postgres:11
env:
POSTGRES_PASSWORD: pass
POSTGRES_USER: postgres
POSTGRES_DB: postgres
ports:
- 5432:5432
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Authenticate to Google Cloud
uses: 'google-github-actions/auth@v1'
if: needs.prepare.outputs.bigquery == 'true'
with:
workload_identity_provider: 'projects/151868820337/locations/global/workloadIdentityPools/integration-test-pool/providers/integration-test-provider'
service_account: 'integration-service-account@cq-integration-tests.iam.gserviceaccount.com'
- uses: actions/setup-python@v5
with:
python-version: "3.9"
cache: "pip"
cache-dependency-path: "${{ needs.prepare.outputs.transformation_dir }}/requirements.txt"
- name: Install dependencies
run: pip install -r requirements.txt
- name: Setup CloudQuery
uses: cloudquery/setup-cloudquery@v3
with:
version: v4.3.2
- name: Test Postgres
run: |
cloudquery migrate tests/postgres.yml
dbt run --target dev-pg --profiles-dir ./tests
if: needs.prepare.outputs.postgres == 'true'
env:
CQ_DSN: postgresql://postgres:pass@localhost:5432/postgres
- name: Test Snowflake
run: |
cloudquery migrate tests/snowflake.yml
dbt run --target dev-snowflake --profiles-dir ./tests
if: needs.prepare.outputs.snowflake == 'true'
env:
SNOWFLAKE_CONNECTION_STRING: "${{ secrets.SNOW_USER }}:${{ secrets.SNOW_PASSWORD }}@${{ secrets.SNOW_ACCOUNT }}.${{ secrets.SNOW_REGION }}/${{ secrets.SNOW_DATABASE }}/${{ secrets.SNOW_SCHEMA }}?warehouse=${{ secrets.SNOW_WAREHOUSE }}"
- name: Test BigQuery
if: needs.prepare.outputs.bigquery == 'true'
run: |
cloudquery migrate tests/bigquery.yml
dbt run --target dev-bigquery --profiles-dir ./tests
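
For context on the test steps above: each Test step points `cloudquery migrate` at a spec file under tests/, and `--profiles-dir ./tests` tells dbt to read its profiles.yml from the same directory. A minimal sketch of what such a tests/postgres.yml spec might look like, assuming the CloudQuery AWS source and PostgreSQL destination plugins (the plugin versions and table selection below are illustrative placeholders, not taken from this PR):

  kind: source
  spec:
    name: aws
    path: cloudquery/aws
    version: "vX.Y.Z" # illustrative placeholder; a real spec pins a release
    destinations: ["postgresql"]
    tables: ["aws_s3_buckets"] # illustrative; real specs list the tables the models need
  ---
  kind: destination
  spec:
    name: postgresql
    path: cloudquery/postgresql
    version: "vX.Y.Z" # illustrative placeholder
    spec:
      # expanded from the CQ_DSN env var set on the Test Postgres step
      connection_string: ${CQ_DSN}

Running `cloudquery migrate` against such a spec creates the destination tables that the dbt models in this directory then build on.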

This file was deleted.

.github/workflows/transformations_aws_compliance_free.yml (125 additions, 0 deletions)
@@ -0,0 +1,125 @@
name: "Test AWS Compliance Free Policies"

on:
pull_request:
paths:
- "transformations/aws/compliance-free/**"
- ".github/workflows/transformations_aws_compliance_free.yml"
- "transformations/aws/macros/**"
- "transformations/aws/models/**"
- "transformations/macros/**"
push:
branches:
- main
paths:
- "transformations/aws/compliance-free/**"
- ".github/workflows/transformations_aws_compliance_free.yml"
- "transformations/aws/macros/**"
- "transformations/aws/models/**"
- "transformations/macros/**"

env:
SNOW_USER: ${{ secrets.SNOW_USER }}
SNOW_PASSWORD: ${{ secrets.SNOW_PASSWORD }}
# DBT assumes the account is in the form of <account>.<region>
SNOW_ACCOUNT: "${{ secrets.SNOW_ACCOUNT }}.${{ secrets.SNOW_REGION }}"
SNOW_WAREHOUSE: ${{ secrets.SNOW_WAREHOUSE }}
SNOW_DATABASE: ${{ secrets.SNOW_DATABASE }}
SNOW_SCHEMA: ${{ secrets.SNOW_SCHEMA }}
SNOW_REGION: ${{ secrets.SNOW_REGION }}

jobs:
prepare:
runs-on: ubuntu-latest
outputs:
transformation_dir: ${{ fromJson(steps.set-result.outputs.result).transformation_dir }}
postgres: ${{ fromJson(steps.set-result.outputs.result).postgres }}
snowflake: ${{ fromJson(steps.set-result.outputs.result).snowflake }}
bigquery: ${{ fromJson(steps.set-result.outputs.result).bigquery }}
steps:
- name: Checkout
uses: actions/checkout@v4
- uses: actions/github-script@v7
id: set-result
env:
TRANSFORMATION_DIR: transformations/aws/compliance-free
with:
script: |
const fs = require('fs/promises');
const { TRANSFORMATION_DIR: transformation_dir } = process.env;
const [postgres, snowflake, bigquery] = await Promise.all([
[Review comment] @erezrokah (Member, Author), Dec 11, 2023:
The main change in this PR is to check for the relevant DB target specs and run migrate + DBT on each existing target. So you can add a new target to the CI simply by adding a bigquery.yml file, for example. (A sketch of such a bigquery.yml follows this file's diff.)
              fs.access(`${transformation_dir}/tests/postgres.yml`, fs.constants.F_OK).then(() => true).catch(() => false),
              fs.access(`${transformation_dir}/tests/snowflake.yml`, fs.constants.F_OK).then(() => true).catch(() => false),
              fs.access(`${transformation_dir}/tests/bigquery.yml`, fs.constants.F_OK).then(() => true).catch(() => false),
            ]);
            return {
              transformation_dir,
              postgres,
              snowflake,
              bigquery,
            };
  transformations-aws-compliance-free:
    permissions:
      id-token: 'write'
      contents: 'read'
    name: ${{ needs.prepare.outputs.transformation_dir }}
    needs: prepare
    timeout-minutes: 30
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: ${{ needs.prepare.outputs.transformation_dir }}
    services:
      postgres:
        image: postgres:11
        env:
          POSTGRES_PASSWORD: pass
          POSTGRES_USER: postgres
          POSTGRES_DB: postgres
        ports:
          - 5432:5432
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Authenticate to Google Cloud
        uses: 'google-github-actions/auth@v1'
        if: needs.prepare.outputs.bigquery == 'true'
        with:
          workload_identity_provider: 'projects/151868820337/locations/global/workloadIdentityPools/integration-test-pool/providers/integration-test-provider'
          service_account: 'integration-service-account@cq-integration-tests.iam.gserviceaccount.com'
      - uses: actions/setup-python@v5
        with:
          python-version: "3.9"
          cache: "pip"
          cache-dependency-path: "${{ needs.prepare.outputs.transformation_dir }}/requirements.txt"
      - name: Install dependencies
        run: pip install -r requirements.txt
      - name: Setup CloudQuery
        uses: cloudquery/setup-cloudquery@v3
        with:
          version: v4.3.2
      - name: Test Postgres
        run: |
          cloudquery migrate tests/postgres.yml
          dbt run --target dev-pg --profiles-dir ./tests --select aws_compliance__foundational_security_free
        if: needs.prepare.outputs.postgres == 'true'
        env:
          CQ_DSN: postgresql://postgres:pass@localhost:5432/postgres
      - name: Test Snowflake
        run: |
          cloudquery migrate tests/snowflake.yml
          dbt run --target dev-snowflake --profiles-dir ./tests --select aws_compliance__foundational_security_free
        if: needs.prepare.outputs.snowflake == 'true'
        env:
          SNOWFLAKE_CONNECTION_STRING: "${{ secrets.SNOW_USER }}:${{ secrets.SNOW_PASSWORD }}@${{ secrets.SNOW_ACCOUNT }}.${{ secrets.SNOW_REGION }}/${{ secrets.SNOW_DATABASE }}/${{ secrets.SNOW_SCHEMA }}?warehouse=${{ secrets.SNOW_WAREHOUSE }}"
      - name: Test BigQuery
        if: needs.prepare.outputs.bigquery == 'true'
        run: |
          cloudquery migrate tests/bigquery.yml
          dbt run --target dev-bigquery --profiles-dir ./tests --select aws_compliance__foundational_security_free
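
As the review comment above explains, enabling a new target is a matter of dropping a spec file into tests/. A minimal sketch of a tests/bigquery.yml destination spec, assuming the CloudQuery BigQuery destination plugin (the version, project, and dataset values are illustrative placeholders):

  kind: destination
  spec:
    name: bigquery
    path: cloudquery/bigquery
    version: "vX.Y.Z" # illustrative placeholder; a real spec pins a release
    spec:
      project_id: my-gcp-project # placeholder; CI would use the workload-identity project
      dataset_id: my_dataset # placeholder

Once this file exists, the prepare job's fs.access check flips the bigquery output to 'true', which in turn enables the Authenticate to Google Cloud and Test BigQuery steps on the next run.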

This file was deleted.
