From 907f14e83e7eeb1c1f0c65610d6b6cd57416aea3 Mon Sep 17 00:00:00 2001 From: Aris Tzoumas Date: Fri, 16 Feb 2024 10:26:00 +0200 Subject: [PATCH] feat: sqlconnect library --- .github/dependabot.yml | 10 + .github/labeler.yml | 2 + .github/pull_request_template.md | 11 + .github/workflows/labeler.yaml | 15 + .../workflows/pr-description-enforcer.yaml | 17 + .github/workflows/prerelease.yaml | 54 ++ .github/workflows/release-please.yaml | 22 + .github/workflows/semantic-pr.yaml | 59 ++ .github/workflows/stale-pr.yaml | 40 ++ .github/workflows/test.yaml | 99 +++ .github/workflows/verify.yml | 56 ++ .gitignore | 36 +- Makefile | 53 ++ README.md | 97 +++ go.mod | 147 +++++ go.sum | 532 +++++++++++++++ internal/scripts/install-golangci-lint.sh | 7 + sqlconnect/async.go | 134 ++++ sqlconnect/columnref.go | 8 + sqlconnect/config/config.go | 19 + sqlconnect/db.go | 100 +++ sqlconnect/db_factory.go | 23 + sqlconnect/db_factory_test.go | 14 + sqlconnect/internal/base/db.go | 129 ++++ sqlconnect/internal/base/dbopts.go | 52 ++ sqlconnect/internal/base/dialect.go | 28 + sqlconnect/internal/base/dialect_test.go | 30 + sqlconnect/internal/base/mapper.go | 10 + sqlconnect/internal/base/schemaadmin.go | 92 +++ sqlconnect/internal/base/tableadmin.go | 267 ++++++++ sqlconnect/internal/bigquery/config.go | 19 + sqlconnect/internal/bigquery/db.go | 108 ++++ sqlconnect/internal/bigquery/dialect.go | 27 + sqlconnect/internal/bigquery/dialect_test.go | 30 + .../internal/bigquery/driver/columns.go | 71 ++ .../internal/bigquery/driver/connection.go | 78 +++ .../internal/bigquery/driver/connector.go | 40 ++ sqlconnect/internal/bigquery/driver/driver.go | 90 +++ .../internal/bigquery/driver/driver_test.go | 228 +++++++ sqlconnect/internal/bigquery/driver/init.go | 9 + sqlconnect/internal/bigquery/driver/result.go | 19 + sqlconnect/internal/bigquery/driver/rows.go | 58 ++ sqlconnect/internal/bigquery/driver/source.go | 56 ++ .../internal/bigquery/driver/statement.go | 133 ++++ 
.../internal/bigquery/integration_test.go | 18 + .../internal/bigquery/legacy_mappings.go | 62 ++ sqlconnect/internal/bigquery/mappings.go | 95 +++ sqlconnect/internal/bigquery/schemaadmin.go | 57 ++ .../testdata/column-mapping-test-columns.json | 29 + .../testdata/column-mapping-test-rows.json | 89 +++ .../testdata/column-mapping-test-seed.sql | 36 ++ ...egacy-column-mapping-test-columns-sql.json | 29 + ...acy-column-mapping-test-columns-table.json | 29 + .../legacy-column-mapping-test-rows.json | 105 +++ sqlconnect/internal/databricks/config.go | 35 + sqlconnect/internal/databricks/db.go | 107 +++ sqlconnect/internal/databricks/dialect.go | 27 + .../internal/databricks/dialect_test.go | 30 + .../internal/databricks/integration_test.go | 30 + .../internal/databricks/legacy_mappings.go | 47 ++ sqlconnect/internal/databricks/mappings.go | 82 +++ sqlconnect/internal/databricks/tableadmin.go | 20 + .../testdata/column-mapping-test-columns.json | 26 + .../testdata/column-mapping-test-rows.json | 83 +++ .../testdata/column-mapping-test-seed.sql | 33 + ...egacy-column-mapping-test-columns-sql.json | 26 + ...acy-column-mapping-test-columns-table.json | 26 + .../legacy-column-mapping-test-rows.json | 80 +++ .../db_integration_test_scenario.go | 609 ++++++++++++++++++ sqlconnect/internal/mysql/config.go | 53 ++ sqlconnect/internal/mysql/config_test.go | 51 ++ sqlconnect/internal/mysql/db.go | 79 +++ sqlconnect/internal/mysql/dialect.go | 27 + sqlconnect/internal/mysql/dialect_test.go | 30 + sqlconnect/internal/mysql/integration_test.go | 38 ++ sqlconnect/internal/mysql/legacy_mappings.go | 63 ++ sqlconnect/internal/mysql/mappings.go | 104 +++ .../testdata/column-mapping-test-columns.json | 34 + .../testdata/column-mapping-test-rows.json | 104 +++ .../testdata/column-mapping-test-seed.sql | 41 ++ ...egacy-column-mapping-test-columns-sql.json | 34 + ...acy-column-mapping-test-columns-table.json | 34 + .../legacy-column-mapping-test-rows.json | 104 +++ 
sqlconnect/internal/postgres/config.go | 46 ++ sqlconnect/internal/postgres/db.go | 64 ++ .../internal/postgres/integration_test.go | 38 ++ .../internal/postgres/legacy_mappings.go | 56 ++ sqlconnect/internal/postgres/mappings.go | 66 ++ .../testdata/column-mapping-test-columns.json | 30 + .../testdata/column-mapping-test-rows.json | 96 +++ .../testdata/column-mapping-test-seed.sql | 37 ++ ...egacy-column-mapping-test-columns-sql.json | 30 + ...acy-column-mapping-test-columns-table.json | 30 + .../legacy-column-mapping-test-rows.json | 96 +++ sqlconnect/internal/redshift/db.go | 74 +++ .../internal/redshift/integration_test.go | 19 + .../internal/redshift/legacy_mappings.go | 54 ++ sqlconnect/internal/redshift/mappings.go | 56 ++ .../testdata/column-mapping-test-columns.json | 28 + .../testdata/column-mapping-test-rows.json | 86 +++ .../testdata/column-mapping-test-seed.sql | 35 + ...egacy-column-mapping-test-columns-sql.json | 28 + ...acy-column-mapping-test-columns-table.json | 28 + .../legacy-column-mapping-test-rows.json | 86 +++ sqlconnect/internal/snowflake/config.go | 54 ++ sqlconnect/internal/snowflake/db.go | 96 +++ sqlconnect/internal/snowflake/dialect.go | 27 + sqlconnect/internal/snowflake/dialect_test.go | 30 + .../internal/snowflake/integration_test.go | 19 + .../internal/snowflake/legacy_mappings.go | 120 ++++ sqlconnect/internal/snowflake/mappings.go | 127 ++++ .../internal/snowflake/mappings_test.go | 29 + .../testdata/column-mapping-test-columns.json | 35 + .../testdata/column-mapping-test-rows.json | 107 +++ .../testdata/column-mapping-test-seed.sql | 51 ++ ...egacy-column-mapping-test-columns-sql.json | 35 + ...acy-column-mapping-test-columns-table.json | 35 + .../legacy-column-mapping-test-rows.json | 107 +++ sqlconnect/internal/trino/config.go | 54 ++ sqlconnect/internal/trino/db.go | 84 +++ sqlconnect/internal/trino/integration_test.go | 20 + sqlconnect/internal/trino/mappings.go | 75 +++ .../testdata/column-mapping-test-columns.json | 19 + 
.../testdata/column-mapping-test-rows.json | 69 ++ .../testdata/column-mapping-test-seed.sql | 26 + sqlconnect/internal/util/validatehost.go | 21 + sqlconnect/internal/util/validatehost_test.go | 26 + sqlconnect/querydef.go | 56 ++ sqlconnect/querydef_test.go | 64 ++ sqlconnect/relationref.go | 69 ++ sqlconnect/relationref_opts.go | 27 + sqlconnect/relationref_test.go | 60 ++ sqlconnect/schemaref.go | 10 + sqlconnect/schemaref_test.go | 16 + 134 files changed, 8366 insertions(+), 20 deletions(-) create mode 100644 .github/dependabot.yml create mode 100644 .github/labeler.yml create mode 100644 .github/pull_request_template.md create mode 100644 .github/workflows/labeler.yaml create mode 100644 .github/workflows/pr-description-enforcer.yaml create mode 100644 .github/workflows/prerelease.yaml create mode 100644 .github/workflows/release-please.yaml create mode 100644 .github/workflows/semantic-pr.yaml create mode 100644 .github/workflows/stale-pr.yaml create mode 100644 .github/workflows/test.yaml create mode 100644 .github/workflows/verify.yml create mode 100644 Makefile create mode 100644 README.md create mode 100644 go.mod create mode 100644 go.sum create mode 100755 internal/scripts/install-golangci-lint.sh create mode 100644 sqlconnect/async.go create mode 100644 sqlconnect/columnref.go create mode 100644 sqlconnect/config/config.go create mode 100644 sqlconnect/db.go create mode 100644 sqlconnect/db_factory.go create mode 100644 sqlconnect/db_factory_test.go create mode 100644 sqlconnect/internal/base/db.go create mode 100644 sqlconnect/internal/base/dbopts.go create mode 100644 sqlconnect/internal/base/dialect.go create mode 100644 sqlconnect/internal/base/dialect_test.go create mode 100644 sqlconnect/internal/base/mapper.go create mode 100644 sqlconnect/internal/base/schemaadmin.go create mode 100644 sqlconnect/internal/base/tableadmin.go create mode 100644 sqlconnect/internal/bigquery/config.go create mode 100644 sqlconnect/internal/bigquery/db.go create 
mode 100644 sqlconnect/internal/bigquery/dialect.go create mode 100644 sqlconnect/internal/bigquery/dialect_test.go create mode 100644 sqlconnect/internal/bigquery/driver/columns.go create mode 100644 sqlconnect/internal/bigquery/driver/connection.go create mode 100644 sqlconnect/internal/bigquery/driver/connector.go create mode 100644 sqlconnect/internal/bigquery/driver/driver.go create mode 100644 sqlconnect/internal/bigquery/driver/driver_test.go create mode 100644 sqlconnect/internal/bigquery/driver/init.go create mode 100644 sqlconnect/internal/bigquery/driver/result.go create mode 100644 sqlconnect/internal/bigquery/driver/rows.go create mode 100644 sqlconnect/internal/bigquery/driver/source.go create mode 100644 sqlconnect/internal/bigquery/driver/statement.go create mode 100644 sqlconnect/internal/bigquery/integration_test.go create mode 100644 sqlconnect/internal/bigquery/legacy_mappings.go create mode 100644 sqlconnect/internal/bigquery/mappings.go create mode 100644 sqlconnect/internal/bigquery/schemaadmin.go create mode 100644 sqlconnect/internal/bigquery/testdata/column-mapping-test-columns.json create mode 100644 sqlconnect/internal/bigquery/testdata/column-mapping-test-rows.json create mode 100644 sqlconnect/internal/bigquery/testdata/column-mapping-test-seed.sql create mode 100644 sqlconnect/internal/bigquery/testdata/legacy-column-mapping-test-columns-sql.json create mode 100644 sqlconnect/internal/bigquery/testdata/legacy-column-mapping-test-columns-table.json create mode 100644 sqlconnect/internal/bigquery/testdata/legacy-column-mapping-test-rows.json create mode 100644 sqlconnect/internal/databricks/config.go create mode 100644 sqlconnect/internal/databricks/db.go create mode 100644 sqlconnect/internal/databricks/dialect.go create mode 100644 sqlconnect/internal/databricks/dialect_test.go create mode 100644 sqlconnect/internal/databricks/integration_test.go create mode 100644 sqlconnect/internal/databricks/legacy_mappings.go create mode 100644 
sqlconnect/internal/databricks/mappings.go create mode 100644 sqlconnect/internal/databricks/tableadmin.go create mode 100644 sqlconnect/internal/databricks/testdata/column-mapping-test-columns.json create mode 100644 sqlconnect/internal/databricks/testdata/column-mapping-test-rows.json create mode 100644 sqlconnect/internal/databricks/testdata/column-mapping-test-seed.sql create mode 100644 sqlconnect/internal/databricks/testdata/legacy-column-mapping-test-columns-sql.json create mode 100644 sqlconnect/internal/databricks/testdata/legacy-column-mapping-test-columns-table.json create mode 100644 sqlconnect/internal/databricks/testdata/legacy-column-mapping-test-rows.json create mode 100644 sqlconnect/internal/integration_test/db_integration_test_scenario.go create mode 100644 sqlconnect/internal/mysql/config.go create mode 100644 sqlconnect/internal/mysql/config_test.go create mode 100644 sqlconnect/internal/mysql/db.go create mode 100644 sqlconnect/internal/mysql/dialect.go create mode 100644 sqlconnect/internal/mysql/dialect_test.go create mode 100644 sqlconnect/internal/mysql/integration_test.go create mode 100644 sqlconnect/internal/mysql/legacy_mappings.go create mode 100644 sqlconnect/internal/mysql/mappings.go create mode 100644 sqlconnect/internal/mysql/testdata/column-mapping-test-columns.json create mode 100644 sqlconnect/internal/mysql/testdata/column-mapping-test-rows.json create mode 100644 sqlconnect/internal/mysql/testdata/column-mapping-test-seed.sql create mode 100644 sqlconnect/internal/mysql/testdata/legacy-column-mapping-test-columns-sql.json create mode 100644 sqlconnect/internal/mysql/testdata/legacy-column-mapping-test-columns-table.json create mode 100644 sqlconnect/internal/mysql/testdata/legacy-column-mapping-test-rows.json create mode 100644 sqlconnect/internal/postgres/config.go create mode 100644 sqlconnect/internal/postgres/db.go create mode 100644 sqlconnect/internal/postgres/integration_test.go create mode 100644 
sqlconnect/internal/postgres/legacy_mappings.go create mode 100644 sqlconnect/internal/postgres/mappings.go create mode 100644 sqlconnect/internal/postgres/testdata/column-mapping-test-columns.json create mode 100644 sqlconnect/internal/postgres/testdata/column-mapping-test-rows.json create mode 100644 sqlconnect/internal/postgres/testdata/column-mapping-test-seed.sql create mode 100644 sqlconnect/internal/postgres/testdata/legacy-column-mapping-test-columns-sql.json create mode 100644 sqlconnect/internal/postgres/testdata/legacy-column-mapping-test-columns-table.json create mode 100644 sqlconnect/internal/postgres/testdata/legacy-column-mapping-test-rows.json create mode 100644 sqlconnect/internal/redshift/db.go create mode 100644 sqlconnect/internal/redshift/integration_test.go create mode 100644 sqlconnect/internal/redshift/legacy_mappings.go create mode 100644 sqlconnect/internal/redshift/mappings.go create mode 100644 sqlconnect/internal/redshift/testdata/column-mapping-test-columns.json create mode 100644 sqlconnect/internal/redshift/testdata/column-mapping-test-rows.json create mode 100644 sqlconnect/internal/redshift/testdata/column-mapping-test-seed.sql create mode 100644 sqlconnect/internal/redshift/testdata/legacy-column-mapping-test-columns-sql.json create mode 100644 sqlconnect/internal/redshift/testdata/legacy-column-mapping-test-columns-table.json create mode 100644 sqlconnect/internal/redshift/testdata/legacy-column-mapping-test-rows.json create mode 100644 sqlconnect/internal/snowflake/config.go create mode 100644 sqlconnect/internal/snowflake/db.go create mode 100644 sqlconnect/internal/snowflake/dialect.go create mode 100644 sqlconnect/internal/snowflake/dialect_test.go create mode 100644 sqlconnect/internal/snowflake/integration_test.go create mode 100644 sqlconnect/internal/snowflake/legacy_mappings.go create mode 100644 sqlconnect/internal/snowflake/mappings.go create mode 100644 sqlconnect/internal/snowflake/mappings_test.go create mode 
100644 sqlconnect/internal/snowflake/testdata/column-mapping-test-columns.json create mode 100644 sqlconnect/internal/snowflake/testdata/column-mapping-test-rows.json create mode 100644 sqlconnect/internal/snowflake/testdata/column-mapping-test-seed.sql create mode 100644 sqlconnect/internal/snowflake/testdata/legacy-column-mapping-test-columns-sql.json create mode 100644 sqlconnect/internal/snowflake/testdata/legacy-column-mapping-test-columns-table.json create mode 100644 sqlconnect/internal/snowflake/testdata/legacy-column-mapping-test-rows.json create mode 100644 sqlconnect/internal/trino/config.go create mode 100644 sqlconnect/internal/trino/db.go create mode 100644 sqlconnect/internal/trino/integration_test.go create mode 100644 sqlconnect/internal/trino/mappings.go create mode 100644 sqlconnect/internal/trino/testdata/column-mapping-test-columns.json create mode 100644 sqlconnect/internal/trino/testdata/column-mapping-test-rows.json create mode 100644 sqlconnect/internal/trino/testdata/column-mapping-test-seed.sql create mode 100644 sqlconnect/internal/util/validatehost.go create mode 100644 sqlconnect/internal/util/validatehost_test.go create mode 100644 sqlconnect/querydef.go create mode 100644 sqlconnect/querydef_test.go create mode 100644 sqlconnect/relationref.go create mode 100644 sqlconnect/relationref_opts.go create mode 100644 sqlconnect/relationref_test.go create mode 100644 sqlconnect/schemaref.go create mode 100644 sqlconnect/schemaref_test.go diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..cdce3f8 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,10 @@ +version: 2 +updates: + - package-ecosystem: "gomod" + directory: "/" + schedule: + interval: "weekly" + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" \ No newline at end of file diff --git a/.github/labeler.yml b/.github/labeler.yml new file mode 100644 index 0000000..6c9e9d2 --- /dev/null +++ 
b/.github/labeler.yml @@ -0,0 +1,2 @@ +with tests: +- any: ['**/*_test.go'] diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000..99090b5 --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,11 @@ +# Description + +< Replace with adequate description for this PR as per [Pull Request document](https://www.notion.so/rudderstacks/Pull-Requests-40a4c6bd7a5e4387ba9029bab297c9e3) > + +## Linear Ticket + +< Replace_with_Linear_Link > + +## Security + +- [ ] The code changed/added as part of this pull request won't create any security issues with how the software is being used. diff --git a/.github/workflows/labeler.yaml b/.github/workflows/labeler.yaml new file mode 100644 index 0000000..1443b60 --- /dev/null +++ b/.github/workflows/labeler.yaml @@ -0,0 +1,15 @@ +name: labeler +on: +- pull_request + +jobs: + triage: + permissions: + contents: read + pull-requests: write + runs-on: ubuntu-latest + steps: + - uses: actions/labeler@v4 + with: + repo-token: "${{ secrets.GITHUB_TOKEN }}" + sync-labels: true diff --git a/.github/workflows/pr-description-enforcer.yaml b/.github/workflows/pr-description-enforcer.yaml new file mode 100644 index 0000000..e82ca03 --- /dev/null +++ b/.github/workflows/pr-description-enforcer.yaml @@ -0,0 +1,17 @@ +name: description +on: + pull_request: + types: + - opened + - edited + - reopened + +jobs: + enforce: + runs-on: ubuntu-latest + + steps: + - uses: rudderlabs/pr-description-enforcer@v1.0.0 + with: + repo-token: '${{ secrets.GITHUB_TOKEN }}' + placeholder-regex: '< Replace .* >' diff --git a/.github/workflows/prerelease.yaml b/.github/workflows/prerelease.yaml new file mode 100644 index 0000000..1fe7767 --- /dev/null +++ b/.github/workflows/prerelease.yaml @@ -0,0 +1,54 @@ +on: + push: + branches: + - "prerelease/*" +name: prerelease +jobs: + prerelease: + runs-on: ubuntu-latest + steps: + - name: Extract branch name + shell: bash + run: echo "##[set-output 
name=branch;]$(echo ${GITHUB_REF#refs/heads/})" + id: extract_branch + - uses: google-github-actions/release-please-action@v3 + with: + token: ${{ secrets.GITHUB_TOKEN }} + pull-request-title-pattern: "chore: prerelease ${version}" + release-type: go + package-name: rudder-server + default-branch: ${{ steps.extract_branch.outputs.branch }} + changelog-types: ' + [ + { + "type": "feat", + "section": "Features", + "hidden": false + }, + { + "type": "fix", + "section": "Bug Fixes", + "hidden": false + }, + { + "type": "chore", + "section":"Miscellaneous", + "hidden": false}, + { + "type": "refactor", + "section": "Miscellaneous", + "hidden": false + }, + { + "type": "test", + "section": "Miscellaneous", + "hidden": false + }, + { + "type": "doc", + "section": "Documentation", + "hidden": false + } + ]' + prerelease: true + release-as: ${{ steps.extract_branch.outputs.branch }} diff --git a/.github/workflows/release-please.yaml b/.github/workflows/release-please.yaml new file mode 100644 index 0000000..5417a3f --- /dev/null +++ b/.github/workflows/release-please.yaml @@ -0,0 +1,22 @@ +on: + push: + branches: + - "main" +name: release-please +jobs: + release-please: + runs-on: ubuntu-latest + steps: + - name: Extract branch name + shell: bash + run: echo "##[set-output name=branch;]$(echo ${GITHUB_REF#refs/heads/})" + id: extract_branch + - uses: google-github-actions/release-please-action@v3 + with: + token: ${{ secrets.PAT }} + pull-request-title-pattern: "chore: release ${version}" + release-type: go + package-name: rudder-server + default-branch: ${{ steps.extract_branch.outputs.branch }} + changelog-types: '[{"type":"feat","section":"Features","hidden":false},{"type":"fix","section":"Bug Fixes","hidden":false},{"type":"chore","section":"Miscellaneous","hidden":false},{"type":"refactor","section":"Miscellaneous","hidden":false},{"type":"test","section":"Miscellaneous","hidden":false},{"type":"doc","section":"Documentation","hidden":false}]' + bump-minor-pre-major: 
true diff --git a/.github/workflows/semantic-pr.yaml b/.github/workflows/semantic-pr.yaml new file mode 100644 index 0000000..0122c67 --- /dev/null +++ b/.github/workflows/semantic-pr.yaml @@ -0,0 +1,59 @@ +name: semantic + +on: + pull_request: + types: + - opened + - edited + - labeled + - unlabeled + - converted_to_draft + - ready_for_review + - synchronize + +jobs: + main: + name: title + runs-on: ubuntu-latest + steps: + - + uses: amannn/action-semantic-pull-request@v4 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + types: | + fix + feat + chore + refactor + exp + doc + test + requireScope: false + subjectPattern: ^(?![A-Z]).+$ + subjectPatternError: | + The subject "{subject}" found in the pull request title "{title}" + didn't match the configured pattern. Please ensure that the subject + doesn't start with an uppercase character. + # For work-in-progress PRs you can typically use draft pull requests + # from GitHub. However, private repositories on the free plan don't have + # this option and therefore this action allows you to opt-in to using the + # special "[WIP]" prefix to indicate this state. This will avoid the + # validation of the PR title and the pull request checks remain pending. + # Note that a second check will be reported if this is enabled. + wip: true + # When using "Squash and merge" on a PR with only one commit, GitHub + # will suggest using that commit message instead of the PR title for the + # merge commit, and it's easy to commit this by mistake. Enable this option + # to also validate the commit message for one commit PRs. + validateSingleCommit: false + # Related to `validateSingleCommit` you can opt-in to validate that the PR + # title matches a single commit to avoid confusion. + validateSingleCommitMatchesPrTitle: false + # If the PR contains one of these labels, the validation is skipped. + # Multiple labels can be separated by newlines. 
+ # If you want to rerun the validation when labels change, you might want + # to use the `labeled` and `unlabeled` event triggers in your workflow. + ignoreLabels: | + bot + dependencies diff --git a/.github/workflows/stale-pr.yaml b/.github/workflows/stale-pr.yaml new file mode 100644 index 0000000..8e4771f --- /dev/null +++ b/.github/workflows/stale-pr.yaml @@ -0,0 +1,40 @@ +name: stale + +on: + schedule: + - cron: '42 1 * * *' + +jobs: + prs: + name: cleanup + runs-on: ubuntu-latest + + permissions: + pull-requests: write + + steps: + - uses: actions/stale@v5 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + operations-per-run: 200 + stale-pr-message: 'This PR is considered to be stale. It has been open 20 days with no further activity thus it is going to be closed in 7 days. To avoid such a case please consider removing the stale label manually or add a comment to the PR.' + days-before-pr-stale: 20 + days-before-pr-close: 7 + stale-pr-label: 'Stale' + + branches: + name: cleanup old branches + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v2 + - name: Run delete-old-branches-action + uses: beatlabs/delete-old-branches-action@v0.0.9 + with: + repo_token: ${{ github.token }} + date: '2 months ago' + dry_run: false + delete_tags: false + extra_protected_branch_regex: ^(main|master|release.*|rudder-saas)$ + exclude_open_pr_branches: true + diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml new file mode 100644 index 0000000..6236071 --- /dev/null +++ b/.github/workflows/test.yaml @@ -0,0 +1,99 @@ +name: test +on: + release: + types: [created] + push: + branches: + - main + pull_request: + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +permissions: + id-token: write # allows the JWT to be requested from GitHub's OIDC provider + contents: read # This is required for actions/checkout + +jobs: + tests: + name: matrix test + runs-on: ubuntu-latest + strategy: 
+ fail-fast: false + matrix: + package: + - . + - sqlconnect/internal + - sqlconnect/internal/bigquery + - sqlconnect/internal/databricks + - sqlconnect/internal/mysql + - sqlconnect/internal/postgres + - sqlconnect/internal/redshift + - sqlconnect/internal/snowflake + - sqlconnect/internal/trino + include: + - package: . + exclude: sqlconnect-go/sqlconnect/internal + - package: sqlconnect/internal + exclude: sqlconnect-go/sqlconnect/internal/(bigquery|databricks|mysql|postgres|redshift|snowflake|trino) + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-go@v5 + with: + go-version: '1.22' + check-latest: true + - run: go version + - run: go mod download + - name: Run tests + run: | + make test exclude="${{ matrix.exclude }}" package=${{ matrix.package }} + env: + REDSHIFT_TEST_ENVIRONMENT_CREDENTIALS: ${{ secrets.REDSHIFT_TEST_ENVIRONMENT_CREDENTIALS }} + SNOWFLAKE_TEST_ENVIRONMENT_CREDENTIALS: ${{ secrets.SNOWFLAKE_TEST_ENVIRONMENT_CREDENTIALS }} + BIGQUERY_TEST_ENVIRONMENT_CREDENTIALS: ${{ secrets.BIGQUERY_TEST_ENVIRONMENT_CREDENTIALS }} + DATABRICKS_TEST_ENVIRONMENT_CREDENTIALS: ${{ secrets.DATABRICKS_TEST_ENVIRONMENT_CREDENTIALS }} + TRINO_TEST_ENVIRONMENT_CREDENTIALS: ${{ secrets.TRINO_TEST_ENVIRONMENT_CREDENTIALS }} + - name: Sanitize name for Artifact + run: | + name=$(echo -n "${{ matrix.package }}" | sed -e 's/[ \t:.\/\\"<>|*?]/_/g' -e 's/--*/-/g') + echo "ARTIFACT_NAME=$name" >> $GITHUB_ENV + - name: Upload coverage report + uses: actions/upload-artifact@v4 + with: + name: ${{ env.ARTIFACT_NAME }}_profile.out + path: profile.out + coverage: + name: coverage + runs-on: 'ubuntu-20.04' + needs: + - tests + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup Go + uses: actions/setup-go@v5 + with: + go-version: '1.22.0' + check-latest: true + - name: Download coverage reports + uses: actions/download-artifact@v4 + - name: Merge Coverage + run: | + go install github.com/wadey/gocovmerge@latest + gocovmerge */profile.out > 
profile.out + - uses: codecov/codecov-action@v4 + with: + fail_ci_if_error: true + files: ./profile.out + token: ${{ secrets.CODECOV_TOKEN }} + all-green: + name: all-tests + if: always() + runs-on: ubuntu-latest + needs: + - tests + steps: + - uses: re-actors/alls-green@v1.2.2 + with: + jobs: ${{ toJSON(needs) }} diff --git a/.github/workflows/verify.yml b/.github/workflows/verify.yml new file mode 100644 index 0000000..304a820 --- /dev/null +++ b/.github/workflows/verify.yml @@ -0,0 +1,56 @@ +name: verify +on: + push: + tags: + - v* + branches: + - master + - main + pull_request: +jobs: + generate: + name: generated files + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-go@v3 + with: + check-latest: true + cache: true + go-version: '1.22.0' + - run: go version + + - run: go mod tidy + - run: git diff --exit-code go.mod + - name: Error message + if: ${{ failure() }} + run: echo '::error file=go.mod,line=1,col=1::Inconsistent go mod file. Ensure you have run `go mod tidy` and committed the files locally.'; echo '::error file=enterprise_mod.go,line=1,col=1::Possible missing enterprise exclusive dependencies.' + + - run: make generate + - run: git diff --exit-code + - name: Error message + if: ${{ failure() }} + run: echo '::error file=Makefile,line=11,col=1::Incorrectly generated files. Ensure you have run `make generate` and committed the files locally.' + + - run: make fmt + - run: git diff --exit-code + - name: Error message + if: ${{ failure() }} + run: echo 'Not formatted files. Ensure you have run `make fmt` and committed the files locally.' 
+ linting: + name: lint + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-go@v3 + with: + go-version: '1.22.0' + check-latest: true + cache: true + - name: golangci-lint + uses: golangci/golangci-lint-action@v3 + with: + version: v1.56.2 + args: + -v + --timeout 10m diff --git a/.gitignore b/.gitignore index 3b735ec..a56504f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,21 +1,17 @@ -# If you prefer the allow list template instead of the deny list, see community template: -# https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore -# -# Binaries for programs and plugins -*.exe -*.exe~ -*.dll -*.so -*.dylib - -# Test binary, built with `go test -c` -*.test - -# Output of the go coverage tool, specifically when used with LiteIDE +.DS_Store +.vscode +*.coverprofile +runtime.log +*.coverprofile +junit*.xml +**/profile.out +**/*.test +.idea/* +*.out.* *.out - -# Dependency directories (remove the comment below to include it) -# vendor/ - -# Go workspace file -go.work +coverage.txt +coverage.html +*.orig +**/gomock_reflect_*/* +ginkgo.report +vendor/ diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..7b74e26 --- /dev/null +++ b/Makefile @@ -0,0 +1,53 @@ +.PHONY: help default test test-run generate lint fmt + +GO=go +LDFLAGS?=-s -w +TESTFILE=_testok + +default: lint + +generate: install-tools + $(GO) generate ./... + +test: install-tools test-run + +test-run: ## Run all unit tests +ifeq ($(filter 1,$(debug) $(RUNNER_DEBUG)),) + $(eval TEST_CMD = gotestsum --format pkgname-and-test-fails --) + $(eval TEST_OPTIONS = -p=1 -v -failfast -shuffle=on -coverprofile=profile.out -covermode=atomic -coverpkg=./... -vet=all --timeout=30m) +else + $(eval TEST_CMD = SLOW=0 go test) + $(eval TEST_OPTIONS = -p=1 -v -failfast -shuffle=on -coverprofile=profile.out -covermode=atomic -coverpkg=./... -vet=all --timeout=30m) +endif +ifdef package +ifdef exclude + $(eval FILES = `go list ./$(package)/... 
| egrep -iv '$(exclude)'`) + $(TEST_CMD) -count=1 $(TEST_OPTIONS) $(FILES) && touch $(TESTFILE) +else + $(TEST_CMD) $(TEST_OPTIONS) ./$(package)/... && touch $(TESTFILE) +endif +else ifdef exclude + $(eval FILES = `go list ./... | egrep -iv '$(exclude)'`) + $(TEST_CMD) -count=1 $(TEST_OPTIONS) $(FILES) && touch $(TESTFILE) +else + $(TEST_CMD) -count=1 $(TEST_OPTIONS) ./... && touch $(TESTFILE) +endif + +help: ## Show the available commands + @grep -E '^[0-9a-zA-Z_-]+:.*?## .*$$' ./Makefile | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' + +install-tools: + go install github.com/golang/mock/mockgen@v1.6.0 + go install mvdan.cc/gofumpt@latest + go install gotest.tools/gotestsum@v1.8.2 + go install golang.org/x/tools/cmd/goimports@latest + bash ./internal/scripts/install-golangci-lint.sh v1.56.2 + +.PHONY: lint +lint: fmt ## Run linters on all go files + golangci-lint run -v --timeout 5m + +.PHONY: fmt +fmt: install-tools ## Formats all go files + gofumpt -l -w -extra . + find . -type f -name '*.go' -exec grep -L -E 'Code generated by .*\. DO NOT EDIT.' 
{} + | xargs goimports -format-only -w -local=github.com/rudderlabs \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 0000000..aeb8c6b --- /dev/null +++ b/README.md @@ -0,0 +1,97 @@ +# sqlconnect + +Sqlconnect provides a uniform client interface for accessing multiple warehouses: + +- bigquery ([configuration](sqlconnect/internal/bigquery/config.go)) +- databricks ([configuration](sqlconnect/internal/databricks/config.go)) +- mysql ([configuration](sqlconnect/internal/mysql/config.go)) +- postgres ([configuration](sqlconnect/internal/postgres/config.go)) +- redshift ([configuration](sqlconnect/internal/postgres/config.go)) +- snowflake ([configuration](sqlconnect/internal/snowflake/config.go)) +- trino ([configuration](sqlconnect/internal/trino/config.go)) + +## Installation + +```bash +go get github.com/rudderlabs/sqlconnect-go +``` + +## Usage + +**Creating a new DB client** +```go +db, err := sqlconnect.NewDB("postgres", []byte(`{ + "host": "postgres.example.com", + "port": 5432, + "dbname": "dbname", + "user": "user", + "password": "password" + +}`)) + +if err != nil { + panic(err) +} +``` + +**Creating a new DB client using legacy mappings for backwards compatibility** +```go +db, err := sqlconnect.NewDB("postgres", []byte(`{ + "host": "postgres.example.com", + "port": 5432, + "dbname": "dbname", + "user": "user", + "password": "password", + "legacyMappings": useLegacyMappings + +}`)) + +if err != nil { + panic(err) +} +``` + + +**Performing admin operations** +```go +{ // schema admin + exists, err := db.SchemaExists(ctx, sqlconnect.SchemaRef{Name: "schema"}) + if err != nil { + panic(err) + } + if !exists { + err = db.CreateSchema(ctx, sqlconnect.SchemaRef{Name: "schema"}) + if err != nil { + panic(err) + } + } +} + +// table admin +{ + exists, err := db.TableExists(ctx, sqlconnect.NewRelationRef("table", sqlconnect.WithSchema("schema"))) + if err != nil { + panic(err) + } + if !exists { + err = db.CreateTestTable(ctx, 
sqlconnect.RelationRef{Schema: "schema", Name: "table"}) + if err != nil { + panic(err) + } + } +} +``` + +**Using the async query API** +```go +table := sqlconnect.NewRelationRef("table", sqlconnect.WithSchema("schema")) + +ch, leave := sqlconnect.QueryJSONAsync(ctx, db, "SELECT * FROM " + db.QuoteTable(table)) +defer leave() +for row := range ch { + if row.Err != nil { + panic(row.Err) + } + _ = row.Value +} +``` \ No newline at end of file diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..c2f8dfb --- /dev/null +++ b/go.mod @@ -0,0 +1,147 @@ +module github.com/rudderlabs/sqlconnect-go + +go 1.22.0 + +require ( + cloud.google.com/go v0.112.0 + cloud.google.com/go/bigquery v1.59.1 + github.com/databricks/databricks-sql-go v1.5.3 + github.com/dlclark/regexp2 v1.11.0 + github.com/go-sql-driver/mysql v1.7.1 + github.com/lib/pq v1.10.9 + github.com/ory/dockertest/v3 v3.10.0 + github.com/rudderlabs/rudder-go-kit v0.23.0 + github.com/samber/lo v1.39.0 + github.com/sirupsen/logrus v1.9.3 + github.com/snowflakedb/gosnowflake v1.7.2 + github.com/stretchr/testify v1.8.4 + github.com/tidwall/sjson v1.2.5 + github.com/trinodb/trino-go-client v0.313.0 + google.golang.org/api v0.166.0 +) + +require ( + cloud.google.com/go/compute v1.23.4 // indirect + cloud.google.com/go/compute/metadata v0.2.3 // indirect + cloud.google.com/go/iam v1.1.6 // indirect + github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 // indirect + github.com/99designs/keyring v1.2.2 // indirect + github.com/Azure/azure-sdk-for-go/sdk/azcore v1.4.0 // indirect + github.com/Azure/azure-sdk-for-go/sdk/internal v1.1.2 // indirect + github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 // indirect + github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 // indirect + github.com/JohnCGriffin/overflow v0.0.0-20211019200055-46fa312c352c // indirect + github.com/Microsoft/go-winio v0.6.0 // indirect + github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 // indirect + 
github.com/andybalholm/brotli v1.0.5 // indirect + github.com/apache/arrow/go/v12 v12.0.1 // indirect + github.com/apache/arrow/go/v14 v14.0.2 // indirect + github.com/apache/thrift v0.17.0 // indirect + github.com/aws/aws-sdk-go-v2 v1.17.7 // indirect + github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.10 // indirect + github.com/aws/aws-sdk-go-v2/credentials v1.13.18 // indirect + github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.59 // indirect + github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.31 // indirect + github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.25 // indirect + github.com/aws/aws-sdk-go-v2/internal/v4a v1.0.23 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.11 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.26 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.25 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.14.0 // indirect + github.com/aws/aws-sdk-go-v2/service/s3 v1.31.0 // indirect + github.com/aws/smithy-go v1.13.5 // indirect + github.com/cenkalti/backoff/v4 v4.2.1 // indirect + github.com/containerd/continuity v0.3.0 // indirect + github.com/coreos/go-oidc/v3 v3.5.0 // indirect + github.com/danieljoos/wincred v1.1.2 // indirect + github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect + github.com/dnephin/pflag v1.0.7 // indirect + github.com/docker/cli v20.10.17+incompatible // indirect + github.com/docker/docker v20.10.27+incompatible // indirect + github.com/docker/go-connections v0.4.0 // indirect + github.com/docker/go-units v0.5.0 // indirect + github.com/dvsekhvalnov/jose2go v1.6.0 // indirect + github.com/fatih/color v1.15.0 // indirect + github.com/felixge/httpsnoop v1.0.4 // indirect + github.com/form3tech-oss/jwt-go v3.2.5+incompatible // indirect + github.com/fsnotify/fsnotify v1.7.0 // indirect + github.com/gabriel-vasile/mimetype v1.4.2 // indirect + 
github.com/go-jose/go-jose/v3 v3.0.0 // indirect + github.com/go-logr/logr v1.4.1 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/goccy/go-json v0.10.2 // indirect + github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 // indirect + github.com/gogo/protobuf v1.3.2 // indirect + github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect + github.com/golang/protobuf v1.5.3 // indirect + github.com/golang/snappy v0.0.4 // indirect + github.com/google/flatbuffers v23.5.26+incompatible // indirect + github.com/google/s2a-go v0.1.7 // indirect + github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect + github.com/google/uuid v1.6.0 // indirect + github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect + github.com/googleapis/gax-go/v2 v2.12.1 // indirect + github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c // indirect + github.com/hashicorp/go-cleanhttp v0.5.1 // indirect + github.com/hashicorp/go-retryablehttp v0.7.1 // indirect + github.com/hashicorp/go-uuid v1.0.3 // indirect + github.com/imdario/mergo v0.3.13 // indirect + github.com/jcmturner/gofork v1.7.6 // indirect + github.com/jmespath/go-jmespath v0.4.0 // indirect + github.com/klauspost/asmfmt v1.3.2 // indirect + github.com/klauspost/compress v1.17.4 // indirect + github.com/klauspost/cpuid/v2 v2.2.6 // indirect + github.com/mattn/go-colorable v0.1.13 // indirect + github.com/mattn/go-isatty v0.0.19 // indirect + github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 // indirect + github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect + github.com/moby/term v0.0.0-20210619224110-3f7ff695adc6 // indirect + github.com/mtibben/percent v0.2.1 // indirect + github.com/opencontainers/go-digest v1.0.0 // indirect + github.com/opencontainers/image-spec v1.0.3-0.20211202183452-c5a74bcca799 // indirect + github.com/opencontainers/runc v1.1.12 // 
indirect + github.com/pierrec/lz4/v4 v4.1.18 // indirect + github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 // indirect + github.com/pkg/errors v0.9.1 // indirect + github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect + github.com/rs/zerolog v1.28.0 // indirect + github.com/tidwall/gjson v1.17.1 // indirect + github.com/tidwall/match v1.1.1 // indirect + github.com/tidwall/pretty v1.2.0 // indirect + github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect + github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect + github.com/xeipuuv/gojsonschema v1.2.0 // indirect + github.com/zeebo/xxh3 v1.0.2 // indirect + go.opencensus.io v0.24.0 // indirect + go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.48.0 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.48.0 // indirect + go.opentelemetry.io/otel v1.23.1 // indirect + go.opentelemetry.io/otel/metric v1.23.1 // indirect + go.opentelemetry.io/otel/trace v1.23.1 // indirect + golang.org/x/crypto v0.19.0 // indirect + golang.org/x/exp v0.0.0-20240119083558-1b970713d09a // indirect + golang.org/x/mod v0.14.0 // indirect + golang.org/x/net v0.21.0 // indirect + golang.org/x/oauth2 v0.17.0 // indirect + golang.org/x/sync v0.6.0 // indirect + golang.org/x/sys v0.17.0 // indirect + golang.org/x/term v0.17.0 // indirect + golang.org/x/text v0.14.0 // indirect + golang.org/x/time v0.5.0 // indirect + golang.org/x/tools v0.17.0 // indirect + golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 // indirect + google.golang.org/appengine v1.6.8 // indirect + google.golang.org/genproto v0.0.0-20240205150955-31a09d347014 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20240205150955-31a09d347014 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20240213162025-012b6fc9bca9 // indirect + google.golang.org/grpc v1.61.1 // indirect + google.golang.org/protobuf v1.32.0 
// indirect + gopkg.in/jcmturner/aescts.v1 v1.0.1 // indirect + gopkg.in/jcmturner/dnsutils.v1 v1.0.1 // indirect + gopkg.in/jcmturner/gokrb5.v6 v6.1.1 // indirect + gopkg.in/jcmturner/rpc.v1 v1.1.0 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect + gotest.tools/gotestsum v1.8.2 // indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..707022b --- /dev/null +++ b/go.sum @@ -0,0 +1,532 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.112.0 h1:tpFCD7hpHFlQ8yPwT3x+QeXqc2T6+n6T+hmABHfDUSM= +cloud.google.com/go v0.112.0/go.mod h1:3jEEVwZ/MHU4djK5t5RHuKOA/GbLddgTdVubX1qnPD4= +cloud.google.com/go/bigquery v1.59.1 h1:CpT+/njKuKT3CEmswm6IbhNu9u35zt5dO4yPDLW+nG4= +cloud.google.com/go/bigquery v1.59.1/go.mod h1:VP1UJYgevyTwsV7desjzNzDND5p6hZB+Z8gZJN1GQUc= +cloud.google.com/go/compute v1.23.4 h1:EBT9Nw4q3zyE7G45Wvv3MzolIrCJEuHys5muLY0wvAw= +cloud.google.com/go/compute v1.23.4/go.mod h1:/EJMj55asU6kAFnuZET8zqgwgJ9FvXWXOkkfQZa4ioI= +cloud.google.com/go/compute/metadata v0.2.0/go.mod h1:zFmK7XCadkQkj6TtorcaGlCW1hT1fIilQDwofLpJ20k= +cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= +cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= +cloud.google.com/go/datacatalog v1.19.3 h1:A0vKYCQdxQuV4Pi0LL9p39Vwvg4jH5yYveMv50gU5Tw= +cloud.google.com/go/datacatalog v1.19.3/go.mod h1:ra8V3UAsciBpJKQ+z9Whkxzxv7jmQg1hfODr3N3YPJ4= +cloud.google.com/go/iam v1.1.6 h1:bEa06k05IO4f4uJonbB5iAgKTPpABy1ayxaIZV/GHVc= +cloud.google.com/go/iam v1.1.6/go.mod h1:O0zxdPeGBoFdWW3HWmBxJsk0pfvNM/p/qa82rWOGTwI= +cloud.google.com/go/longrunning v0.5.5 h1:GOE6pZFdSrTb4KAiKnXsJBtlE6mEyaW44oKyMILWnOg= +cloud.google.com/go/longrunning v0.5.5/go.mod h1:WV2LAxD8/rg5Z1cNW6FJ/ZpX4E4VnDnoTk0yawPBB7s= +cloud.google.com/go/storage v1.38.0 h1:Az68ZRGlnNTpIBbLjSMIV2BDcwwXYlRlQzis0llkpJg= +cloud.google.com/go/storage 
v1.38.0/go.mod h1:tlUADB0mAb9BgYls9lq+8MGkfzOXuLrnHXlpHmvFJoY= +github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 h1:/vQbFIOMbk2FiG/kXiLl8BRyzTWDw7gX/Hz7Dd5eDMs= +github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4/go.mod h1:hN7oaIRCjzsZ2dE+yG5k+rsdt3qcwykqK6HVGcKwsw4= +github.com/99designs/keyring v1.2.2 h1:pZd3neh/EmUzWONb35LxQfvuY7kiSXAq3HQd97+XBn0= +github.com/99designs/keyring v1.2.2/go.mod h1:wes/FrByc8j7lFOAGLGSNEg8f/PaI3cgTBqhFkHUrPk= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.4.0 h1:rTnT/Jrcm+figWlYz4Ixzt0SJVR2cMC8lvZcimipiEY= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.4.0/go.mod h1:ON4tFdPTwRcgWEaVDrN3584Ef+b7GgSJaXxe5fW9t4M= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.1.0 h1:QkAcEIAKbNL4KoFr4SathZPhDhF4mVwpBMFlYjyAqy8= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.1.0/go.mod h1:bhXu1AjYL+wutSL/kpSq6s7733q2Rb0yuot9Zgfqa/0= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.1.2 h1:+5VZ72z0Qan5Bog5C+ZkgSqUbeVUd9wgtHOrIKuc5b8= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.1.2/go.mod h1:eWRD7oawr1Mu1sLCawqVc0CUiF43ia3qQMxLscsKQ9w= +github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 h1:u/LLAOFgsMv7HmNL4Qufg58y+qElGOt5qv0z1mURkRY= +github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0/go.mod h1:2e8rMJtl2+2j+HXbTBwnyGpm5Nou7KhvSfxOq8JpTag= +github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8= +github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= +github.com/AzureAD/microsoft-authentication-library-for-go v0.5.1 h1:BWe8a+f/t+7KY7zH2mqygeUD0t8hNFXe08p1Pb3/jKE= +github.com/AzureAD/microsoft-authentication-library-for-go v0.5.1/go.mod h1:Vt9sXTKwMyGcOxSmLDMnGPgqsUg7m8pe215qMLrDXw4= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/JohnCGriffin/overflow v0.0.0-20211019200055-46fa312c352c 
h1:RGWPOewvKIROun94nF7v2cua9qP+thov/7M50KEoeSU= +github.com/JohnCGriffin/overflow v0.0.0-20211019200055-46fa312c352c/go.mod h1:X0CRv0ky0k6m906ixxpzmDRLvX58TFUKS2eePweuyxk= +github.com/Microsoft/go-winio v0.6.0 h1:slsWYD/zyx7lCXoZVlvQrj0hPTM1HI4+v1sIda2yDvg= +github.com/Microsoft/go-winio v0.6.0/go.mod h1:cTAf44im0RAYeL23bpB+fzCyDH2MJiz2BO69KH/soAE= +github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 h1:TngWCqHvy9oXAN6lEVMRuU21PR1EtLVZJmdB18Gu3Rw= +github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5/go.mod h1:lmUJ/7eu/Q8D7ML55dXQrVaamCz2vxCfdQBasLZfHKk= +github.com/andybalholm/brotli v1.0.5 h1:8uQZIdzKmjc/iuPu7O2ioW48L81FgatrcpfFmiq/cCs= +github.com/andybalholm/brotli v1.0.5/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= +github.com/apache/arrow/go/v12 v12.0.1 h1:JsR2+hzYYjgSUkBSaahpqCetqZMr76djX80fF/DiJbg= +github.com/apache/arrow/go/v12 v12.0.1/go.mod h1:weuTY7JvTG/HDPtMQxEUp7pU73vkLWMLpY67QwZ/WWw= +github.com/apache/arrow/go/v14 v14.0.2 h1:N8OkaJEOfI3mEZt07BIkvo4sC6XDbL+48MBPWO5IONw= +github.com/apache/arrow/go/v14 v14.0.2/go.mod h1:u3fgh3EdgN/YQ8cVQRguVW3R+seMybFg8QBQ5LU+eBY= +github.com/apache/thrift v0.17.0 h1:cMd2aj52n+8VoAtvSvLn4kDC3aZ6IAkBuqWQ2IDu7wo= +github.com/apache/thrift v0.17.0/go.mod h1:OLxhMRJxomX+1I/KUw03qoV3mMz16BwaKI+d4fPBx7Q= +github.com/aws/aws-sdk-go-v2 v1.17.7 h1:CLSjnhJSTSogvqUGhIC6LqFKATMRexcxLZ0i/Nzk9Eg= +github.com/aws/aws-sdk-go-v2 v1.17.7/go.mod h1:uzbQtefpm44goOPmdKyAlXSNcwlRgF3ePWVW6EtJvvw= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.10 h1:dK82zF6kkPeCo8J1e+tGx4JdvDIQzj7ygIoLg8WMuGs= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.10/go.mod h1:VeTZetY5KRJLuD/7fkQXMU6Mw7H5m/KP2J5Iy9osMno= +github.com/aws/aws-sdk-go-v2/config v1.18.19 h1:AqFK6zFNtq4i1EYu+eC7lcKHYnZagMn6SW171la0bGw= +github.com/aws/aws-sdk-go-v2/config v1.18.19/go.mod h1:XvTmGMY8d52ougvakOv1RpiTLPz9dlG/OQHsKU/cMmY= +github.com/aws/aws-sdk-go-v2/credentials v1.13.18 h1:EQMdtHwz0ILTW1hoP+EwuWhwCG1hD6l3+RWFQABET4c= 
+github.com/aws/aws-sdk-go-v2/credentials v1.13.18/go.mod h1:vnwlwjIe+3XJPBYKu1et30ZPABG3VaXJYr8ryohpIyM= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.1 h1:gt57MN3liKiyGopcqgNzJb2+d9MJaKT/q1OksHNXVE4= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.1/go.mod h1:lfUx8puBRdM5lVVMQlwt2v+ofiG/X6Ms+dy0UkG/kXw= +github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.59 h1:E3Y+OfzOK1+rmRo/K2G0ml8Vs+Xqk0kOnf4nS0kUtBc= +github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.59/go.mod h1:1M4PLSBUVfBI0aP+C9XI7SM6kZPCGYyI6izWz0TGprE= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.31 h1:sJLYcS+eZn5EeNINGHSCRAwUJMFVqklwkH36Vbyai7M= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.31/go.mod h1:QT0BqUvX1Bh2ABdTGnjqEjvjzrCfIniM9Sc8zn9Yndo= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.25 h1:1mnRASEKnkqsntcxHaysxwgVoUUp5dkiB+l3llKnqyg= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.25/go.mod h1:zBHOPwhBc3FlQjQJE/D3IfPWiWaQmT06Vq9aNukDo0k= +github.com/aws/aws-sdk-go-v2/internal/ini v1.3.32 h1:p5luUImdIqywn6JpQsW3tq5GNOxKmOnEpybzPx+d1lk= +github.com/aws/aws-sdk-go-v2/internal/ini v1.3.32/go.mod h1:XGhIBZDEgfqmFIugclZ6FU7v75nHhBDtzuB4xB/tEi4= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.0.23 h1:DWYZIsyqagnWL00f8M/SOr9fN063OEQWn9LLTbdYXsk= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.0.23/go.mod h1:uIiFgURZbACBEQJfqTZPb/jxO7R+9LeoHUFudtIdeQI= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.11 h1:y2+VQzC6Zh2ojtV2LoC0MNwHWc6qXv/j2vrQtlftkdA= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.11/go.mod h1:iV4q2hsqtNECrfmlXyord9u4zyuFEJX9eLgLpSPzWA8= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.26 h1:CeuSeq/8FnYpPtnuIeLQEEvDv9zUjneuYi8EghMBdwQ= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.26/go.mod h1:2UqAAwMUXKeRkAHIlDJqvMVgOWkUi/AUXPk/YIe+Dg4= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.25 h1:5LHn8JQ0qvjD9L9JhMtylnkcw7j05GDZqM9Oin6hpr0= 
+github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.25/go.mod h1:/95IA+0lMnzW6XzqYJRpjjsAbKEORVeO0anQqjd2CNU= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.14.0 h1:e2ooMhpYGhDnBfSvIyusvAwX7KexuZaHbQY2Dyei7VU= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.14.0/go.mod h1:bh2E0CXKZsQN+faiKVqC40vfNMAWheoULBCnEgO9K+8= +github.com/aws/aws-sdk-go-v2/service/s3 v1.31.0 h1:B1G2pSPvbAtQjilPq+Y7jLIzCOwKzuVEl+aBBaNG0AQ= +github.com/aws/aws-sdk-go-v2/service/s3 v1.31.0/go.mod h1:ncltU6n4Nof5uJttDtcNQ537uNuwYqsZZQcpkd2/GUQ= +github.com/aws/aws-sdk-go-v2/service/sso v1.12.6 h1:5V7DWLBd7wTELVz5bPpwzYy/sikk0gsgZfj40X+l5OI= +github.com/aws/aws-sdk-go-v2/service/sso v1.12.6/go.mod h1:Y1VOmit/Fn6Tz1uFAeCO6Q7M2fmfXSCLeL5INVYsLuY= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.14.6 h1:B8cauxOH1W1v7rd8RdI/MWnoR4Ze0wIHWrb90qczxj4= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.14.6/go.mod h1:Lh/bc9XUf8CfOY6Jp5aIkQtN+j1mc+nExc+KXj9jx2s= +github.com/aws/aws-sdk-go-v2/service/sts v1.18.7 h1:bWNgNdRko2x6gqa0blfATqAZKZokPIeM1vfmQt2pnvM= +github.com/aws/aws-sdk-go-v2/service/sts v1.18.7/go.mod h1:JuTnSoeePXmMVe9G8NcjjwgOKEfZ4cOjMuT2IBT/2eI= +github.com/aws/smithy-go v1.13.5 h1:hgz0X/DX0dGqTYpGALqXJoRKRj5oQ7150i5FdTePzO8= +github.com/aws/smithy-go v1.13.5/go.mod h1:Tg+OJXh4MB2R/uN61Ko2f6hTZwB/ZYGOtib8J3gBHzA= +github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM= +github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/xds/go v0.0.0-20231109132714-523115ebc101 h1:7To3pQ+pZo0i3dsWEbinPNFs5gPSBOsJtx3wTT94VBY= +github.com/cncf/xds/go 
v0.0.0-20231109132714-523115ebc101/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/containerd/continuity v0.3.0 h1:nisirsYROK15TAMVukJOUyGJjz4BNQJBVsNvAXZJ/eg= +github.com/containerd/continuity v0.3.0/go.mod h1:wJEAIwKOm/pBZuBd0JmeTvnLquTB1Ag8espWhkykbPM= +github.com/coreos/go-oidc/v3 v3.5.0 h1:VxKtbccHZxs8juq7RdJntSqtXFtde9YpNpGn0yqgEHw= +github.com/coreos/go-oidc/v3 v3.5.0/go.mod h1:ecXRtV4romGPeO6ieExAsUK9cb/3fp9hXNz1tlv8PIM= +github.com/coreos/go-systemd/v22 v22.3.3-0.20220203105225-a9a7ef127534/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= +github.com/creack/pty v1.1.11 h1:07n33Z8lZxZ2qwegKbObQohDhXDQxiMMz1NOUGYlesw= +github.com/creack/pty v1.1.11/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/danieljoos/wincred v1.1.2 h1:QLdCxFs1/Yl4zduvBdcHB8goaYk9RARS2SgLLRuAyr0= +github.com/danieljoos/wincred v1.1.2/go.mod h1:GijpziifJoIBfYh+S7BbkdUTU4LfM+QnGqR5Vl2tAx0= +github.com/databricks/databricks-sql-go v1.5.3 h1:A8pe8azPkGM/1lD2wWf1G6sW5PDuLM9qBfSIbLuWzvw= +github.com/databricks/databricks-sql-go v1.5.3/go.mod h1:8aNP4vhhfBJZhc4n7LcSAvtJsZ+Dc25Q6mGxM1/8WaY= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI= +github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= +github.com/dnaeon/go-vcr v1.1.0 h1:ReYa/UBrRyQdant9B4fNHGoCNKw6qh6P0fsdGmZpR7c= +github.com/dnaeon/go-vcr v1.1.0/go.mod h1:M7tiix8f0r6mKKJ3Yq/kqU1OYf3MnfmBWVbPx/yU9ko= +github.com/dnephin/pflag v1.0.7 h1:oxONGlWxhmUct0YzKTgrpQv9AUA1wtPBn7zuSjJqptk= +github.com/dnephin/pflag v1.0.7/go.mod 
h1:uxE91IoWURlOiTUIA8Mq5ZZkAv3dPUfZNaT80Zm7OQE= +github.com/docker/cli v20.10.17+incompatible h1:eO2KS7ZFeov5UJeaDmIs1NFEDRf32PaqRpvoEkKBy5M= +github.com/docker/cli v20.10.17+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= +github.com/docker/docker v20.10.27+incompatible h1:Id/ZooynV4ZlD6xX20RCd3SR0Ikn7r4QZDa2ECK2TgA= +github.com/docker/docker v20.10.27+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ= +github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec= +github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= +github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/dvsekhvalnov/jose2go v1.6.0 h1:Y9gnSnP4qEI0+/uQkHvFXeD2PLPJeXEL+ySMEA2EjTY= +github.com/dvsekhvalnov/jose2go v1.6.0/go.mod h1:QsHjhyTlD/lAVqn/NSbVZmSCGeDehTB/mPZadG+mhXU= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/envoyproxy/protoc-gen-validate v1.0.2 h1:QkIBuU5k+x7/QXPvPPnWXWlCdaBFApVqftFV6k087DA= +github.com/envoyproxy/protoc-gen-validate v1.0.2/go.mod h1:GpiZQP3dDbg4JouG/NNS7QWXpgx6x8QiMKdmN72jogE= +github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= +github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs= +github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBDUSsw= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop 
v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/form3tech-oss/jwt-go v3.2.5+incompatible h1:/l4kBbb4/vGSsdtB5nUe8L7B9mImVMaBPw9L/0TBHU8= +github.com/form3tech-oss/jwt-go v3.2.5+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= +github.com/fsnotify/fsnotify v1.5.4/go.mod h1:OVB6XrOHzAwXMpEM7uPOzcehqUV2UqJxmVXmkdnm1bU= +github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA= +github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM= +github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU= +github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA= +github.com/go-jose/go-jose/v3 v3.0.0 h1:s6rrhirfEP/CGIoc6p+PZAeogN2SxKav6Wp7+dyMWVo= +github.com/go-jose/go-jose/v3 v3.0.0/go.mod h1:RNkWWRld676jZEYoV3+XK8L2ZnNSvIsxFMht0mSX+u8= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ= +github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-sql-driver/mysql v1.7.1 h1:lUIinVbN1DY0xBg0eMOzmmtGoHwWBbvnWubQUrtU8EI= +github.com/go-sql-driver/mysql v1.7.1/go.mod h1:OXbVy3sEdcQ2Doequ6Z5BW6fXNQTmx+9S1MCJN5yJMI= +github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= +github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= +github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 h1:ZpnhV/YsD2/4cESfV5+Hoeu/iUR3ruzNvZ+yQfO03a0= +github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2/go.mod h1:bBOAhwG1umN6/6ZUMtDFBMQR8jRg9O75tm9K00oMsK4= +github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= 
+github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang-jwt/jwt v3.2.1+incompatible h1:73Z+4BJcrTC+KczS6WvTPvRGOp1WmfEP4Q1lOd9Z/+c= +github.com/golang-jwt/jwt v3.2.1+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= 
+github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= +github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= +github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/flatbuffers v23.5.26+incompatible h1:M9dgRyhJemaM4Sw8+66GHBu8ioaQmyPLg1b8VwK5WJg= +github.com/google/flatbuffers v23.5.26+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/martian/v3 v3.3.2 h1:IqNFLAmvJOgVlpdEBiQbDc2EwKW77amAycfTuWKdfvw= +github.com/google/martian/v3 v3.3.2/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= +github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o= +github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw= +github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4= +github.com/google/shlex 
v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/enterprise-certificate-proxy v0.3.2 h1:Vie5ybvEvT75RniqhfFxPRy3Bf7vr3h0cechB90XaQs= +github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0= +github.com/googleapis/gax-go/v2 v2.12.1 h1:9F8GV9r9ztXyAi00gsMQHNoF51xPZm8uj1dpYt2ZETM= +github.com/googleapis/gax-go/v2 v2.12.1/go.mod h1:61M8vcyyXR2kqKFxKrfA22jaA8JGF7Dc8App1U3H6jc= +github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c h1:6rhixN/i8ZofjG1Y75iExal34USq5p+wiN1tpie8IrU= +github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c/go.mod h1:NMPJylDgVpX0MLRlPy15sqSwOFv/U1GZ2m21JhFfek0= +github.com/hashicorp/go-cleanhttp v0.5.1 h1:dH3aiDG9Jvb5r5+bYHsikaOUIpcM0xvgMXVoDkXMzJM= +github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-hclog v0.9.2 h1:CG6TE5H9/JXsFWJCfoIVpKFIkFe6ysEuHirp4DxCsHI= +github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ= +github.com/hashicorp/go-retryablehttp v0.7.1 h1:sUiuQAnLlbvmExtFQs72iFW/HXeUn8Z1aJLQ4LJJbTQ= +github.com/hashicorp/go-retryablehttp v0.7.1/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY= +github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8= +github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/imdario/mergo v0.3.13 h1:lFzP57bqS/wsqKssCGmtLAb8A0wKjLGrve2q3PPVcBk= +github.com/imdario/mergo v0.3.13/go.mod h1:4lJ1jqUDcsbIECGy0RUJAXNIhg+6ocWgb1ALK2O4oXg= +github.com/jcmturner/gofork v1.7.6 h1:QH0l3hzAU1tfT3rZCnW5zXl+orbkNMMRGJfdJjHVETg= 
+github.com/jcmturner/gofork v1.7.6/go.mod h1:1622LH6i/EZqLloHfE7IeZ0uEJwMSUyQ/nDd82IeqRo= +github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= +github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= +github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8= +github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= +github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/asmfmt v1.3.2 h1:4Ri7ox3EwapiOjCki+hw14RyKk201CN4rzyCJRFLpK4= +github.com/klauspost/asmfmt v1.3.2/go.mod h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j0HLHbNSE= +github.com/klauspost/compress v1.17.4 h1:Ej5ixsIri7BrIjBkRZLTo6ghwrEtHFk7ijlczPW4fZ4= +github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= +github.com/klauspost/cpuid/v2 v2.2.6 h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/48xc= +github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/lib/pq v1.10.9 
h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= +github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= +github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 h1:AMFGa4R4MiIpspGNG7Z948v4n35fFGB3RR3G/ry4FWs= +github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8/go.mod h1:mC1jAcsrzbxHt8iiaC+zU4b1ylILSosueou12R++wfY= +github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 h1:+n/aFZefKZp7spd8DFdX7uMikMLXX4oubIzJF4kv/wI= +github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3/go.mod h1:RagcQ7I8IeTMnF8JTXieKnO4Z6JCsikNEzj0DwauVzE= +github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= +github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/moby/term v0.0.0-20210619224110-3f7ff695adc6 h1:dcztxKSvZ4Id8iPpHERQBbIJfabdt4wUm5qy3wOL2Zc= +github.com/moby/term v0.0.0-20210619224110-3f7ff695adc6/go.mod h1:E2VnQOmVuvZB6UYnnDB0qG5Nq/1tD9acaOpo6xmt0Kw= +github.com/mtibben/percent v0.2.1 h1:5gssi8Nqo8QU/r2pynCm+hBQHpkB/uNK7BJCFogWdzs= +github.com/mtibben/percent v0.2.1/go.mod h1:KG9uO+SZkUp+VkRHsCdYQV3XSZrrSpR3O9ibNBTZrns= 
+github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= +github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= +github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= +github.com/opencontainers/image-spec v1.0.3-0.20211202183452-c5a74bcca799 h1:rc3tiVYb5z54aKaDfakKn0dDjIyPpTtszkjuMzyt7ec= +github.com/opencontainers/image-spec v1.0.3-0.20211202183452-c5a74bcca799/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= +github.com/opencontainers/runc v1.1.12 h1:BOIssBaW1La0/qbNZHXOOa71dZfZEQOzW7dqQf3phss= +github.com/opencontainers/runc v1.1.12/go.mod h1:S+lQwSfncpBha7XTy/5lBwWgm5+y5Ma/O44Ekby9FK8= +github.com/ory/dockertest/v3 v3.10.0 h1:4K3z2VMe8Woe++invjaTB7VRyQXQy5UY+loujO4aNE4= +github.com/ory/dockertest/v3 v3.10.0/go.mod h1:nr57ZbRWMqfsdGdFNLHz5jjNdDb7VVFnzAeW1n5N1Lg= +github.com/pierrec/lz4/v4 v4.1.18 h1:xaKrnTkyoqfh1YItXl56+6KJNVYWlEEPuAQW9xsplYQ= +github.com/pierrec/lz4/v4 v4.1.18/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= +github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 h1:KoWmjvw+nsYOo29YJK9vDA65RGE3NrOnUtO7a+RF9HU= +github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod 
h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8= +github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= +github.com/rs/xid v1.4.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= +github.com/rs/zerolog v1.28.0 h1:MirSo27VyNi7RJYP3078AA1+Cyzd2GB66qy3aUHvsWY= +github.com/rs/zerolog v1.28.0/go.mod h1:NILgTygv/Uej1ra5XxGf82ZFSLk58MFGAUS2o6usyD0= +github.com/rudderlabs/rudder-go-kit v0.23.0 h1:lldcc5h0JtFJHvoGWhDAcSwF1Lb2D2kNHZZ390QFKdU= +github.com/rudderlabs/rudder-go-kit v0.23.0/go.mod h1:iZd7N3uQqMS5ROnKTD/1bR6IbTrWbeYJ86YjyJw8K5k= +github.com/samber/lo v1.39.0 h1:4gTz1wUhNYLhFSKl6O+8peW0v2F4BCY034GRpU9WnuA= +github.com/samber/lo v1.39.0/go.mod h1:+m/ZKRl6ClXCE2Lgf3MsQlWfh4bn1bz6CXEOxnEXnEA= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/snowflakedb/gosnowflake v1.7.2 h1:HRSwva8YXC64WUppfmHcMNVVzSE1+EwXXaJxgS0EkTo= +github.com/snowflakedb/gosnowflake v1.7.2/go.mod h1:03tW856vc3ceM4rJuj7KO4dzqN7qoezTm+xw7aPIIFo= +github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= 
+github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/gjson v1.17.1 h1:wlYEnwqAHgzmhNUFfw7Xalt2JzQvsMx2Se4PcoFCT/U= +github.com/tidwall/gjson v1.17.1/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= +github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= +github.com/tidwall/pretty v1.2.0 h1:RWIZEg2iJ8/g6fDDYzMpobmaoGh5OLl4AXtGUGPcqCs= +github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY= +github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28= +github.com/trinodb/trino-go-client v0.313.0 h1:lp8N9JKTqMuZ9LlAwLjgUtkwDnJc8fjpJmunpZ3afjk= +github.com/trinodb/trino-go-client v0.313.0/go.mod h1:YpZf2WAClFhU+n0ZhdkmMbugYaMRM/mjywiQru0wpeQ= +github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod 
h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= +github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= +github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +github.com/zeebo/assert v1.3.0 h1:g7C04CbJuIDKNPFHmsk4hwZDO5O+kntRxzaUoNXj+IQ= +github.com/zeebo/assert v1.3.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0= +github.com/zeebo/xxh3 v1.0.2 h1:xZmwmqxHZA8AI603jOQ0tMqmBr9lPeFwGg6d+xy9DC0= +github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA= +go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= +go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.48.0 h1:P+/g8GpuJGYbOp2tAdKrIPUX9JO02q8Q0YNlHolpibA= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.48.0/go.mod h1:tIKj3DbO8N9Y2xo52og3irLsPI4GW02DSMtrVgNMgxg= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.48.0 h1:doUP+ExOpH3spVTLS0FcWGLnQrPct/hD/bCPbDRUEAU= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.48.0/go.mod h1:rdENBZMT2OE6Ne/KLwpiXudnAsbdrdBaqBvTN8M8BgA= +go.opentelemetry.io/otel v1.23.1 h1:Za4UzOqJYS+MUczKI320AtqZHZb7EqxO00jAHE0jmQY= +go.opentelemetry.io/otel v1.23.1/go.mod h1:Td0134eafDLcTS4y+zQ26GE8u3dEuRBiBCTUIRHaikA= +go.opentelemetry.io/otel/metric v1.23.1 h1:PQJmqJ9u2QaJLBOELl1cxIdPcpbwzbkjfEyelTl2rlo= +go.opentelemetry.io/otel/metric v1.23.1/go.mod h1:mpG2QPlAfnK8yNhNJAxDZruU9Y1/HubbC+KyH8FaCWI= +go.opentelemetry.io/otel/sdk v1.23.1 
h1:O7JmZw0h76if63LQdsBMKQDWNb5oEcOThG9IrxscV+E= +go.opentelemetry.io/otel/sdk v1.23.1/go.mod h1:LzdEVR5am1uKOOwfBWFef2DCi1nu3SA8XQxx2IerWFk= +go.opentelemetry.io/otel/trace v1.23.1 h1:4LrmmEd8AU2rFvU1zegmvqW7+kWarxtNOPyeL6HmYY8= +go.opentelemetry.io/otel/trace v1.23.1/go.mod h1:4IpnpJFwr1mo/6HL8XIPJaE9y0+u1KcVmuW7dwFSVrI= +go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= +go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.19.0 h1:ENy+Az/9Y1vSrlrvBSyna3PITt4tiZLf7sgCjZBX7Wo= +golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20240119083558-1b970713d09a h1:Q8/wZp0KX97QFTc2ywcOE0YRjZPVIx+MXInMzdvQqcA= +golang.org/x/exp v0.0.0-20240119083558-1b970713d09a/go.mod h1:idGWGoKP1toJGkd5/ig9ZLuPcZBC3ewk7SzmH0uou08= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod 
h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.14.0 h1:dGoOF9QVLYng8IHTm7BAyWqCqSheQ5pYWGhzW00YJr0= +golang.org/x/mod v0.14.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.3.0/go.mod h1:MBQ8lrhLObU/6UmLb4fmbmk5OcyYmqtbGd/9yIeKjEE= +golang.org/x/net v0.4.0/go.mod h1:MBQ8lrhLObU/6UmLb4fmbmk5OcyYmqtbGd/9yIeKjEE= +golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4= +golang.org/x/net v0.21.0/go.mod 
h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.3.0/go.mod h1:rQrIauxkUhJ6CuwEXwymO2/eh4xz2ZWF1nBkcxS+tGk= +golang.org/x/oauth2 v0.17.0 h1:6m3ZPmLEFdVxKKWnKq4VqZ60gutO35zm+zrAHVmHyDQ= +golang.org/x/oauth2 v0.17.0/go.mod h1:OzPDGQiuQMguemayvdylqddI7qcD9lnSDb+1FiwQ5HA= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ= +golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys 
v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210819135213-f52c844e1c1c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.17.0 
h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y= +golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.0.0-20220526004731-065cf7ba2467/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA= +golang.org/x/term v0.17.0 h1:mkTF7LCd6WGJNL3K1Ad7kwxNfYAW6a8a8QqtMblp/4U= +golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= +golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk= +golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod 
h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190624222133-a101b041ded4/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/tools v0.1.11/go.mod h1:SgwaegtQh8clINPpECJMqnxLv9I09HLqnW3RMqW0CA4= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.17.0 h1:FvmRgNOcs3kOa+T20R1uhfP9F6HgG2mfxDv1vrx1Htc= +golang.org/x/tools v0.17.0/go.mod h1:xsh6VxdV005rRVaS6SSAf9oiAqljS7UZUacMZ8Bnsps= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 h1:+cNy6SZtPcJQH3LJVLOSmiC7MMxXNOb3PU/VUEz+EhU= +golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90= +gonum.org/v1/gonum v0.12.0 h1:xKuo6hzt+gMav00meVPUlXwSdoEJP46BR+wdxQEFK2o= +gonum.org/v1/gonum v0.12.0/go.mod h1:73TDxJfAAHeA8Mk9mf8NlIppyhQNo5GLTcYeqgo2lvY= +google.golang.org/api v0.166.0 h1:6m4NUwrZYhAaVIHZWxaKjw1L1vNAjtMwORmKRyEEo24= +google.golang.org/api v0.166.0/go.mod h1:4FcBc686KFi7QI/U51/2GKKevfZMpM17sCdibqe/bSA= 
+google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= +google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20240205150955-31a09d347014 h1:g/4bk7P6TPMkAUbUhquq98xey1slwvuVJPosdBqYJlU= +google.golang.org/genproto v0.0.0-20240205150955-31a09d347014/go.mod h1:xEgQu1e4stdSSsxPDK8Azkrk/ECl5HvdPf6nbZrTS5M= +google.golang.org/genproto/googleapis/api v0.0.0-20240205150955-31a09d347014 h1:x9PwdEgd11LgK+orcck69WVRo7DezSO4VUMPI4xpc8A= +google.golang.org/genproto/googleapis/api v0.0.0-20240205150955-31a09d347014/go.mod h1:rbHMSEDyoYX62nRVLOCc4Qt1HbsdytAYoVwgjiOhF3I= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240213162025-012b6fc9bca9 h1:hZB7eLIaYlW9qXRfCq/qDaPdbeY3757uARz5Vvfv+cY= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240213162025-012b6fc9bca9/go.mod h1:YUWgXUFRPfoYK1IHMuxH5K6nPEXSCzIMljnQ59lLRCk= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= 
+google.golang.org/grpc v1.61.1 h1:kLAiWrZs7YeDM6MumDe7m3y4aM6wacLzM1Y/wiLP9XY= +google.golang.org/grpc v1.61.1/go.mod h1:VUbo7IFqmF1QtCAstipjG0GIoq49KvMe9+h1jFLBNJs= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I= +google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c 
h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/jcmturner/aescts.v1 v1.0.1 h1:cVVZBK2b1zY26haWB4vbBiZrfFQnfbTVrE3xZq6hrEw= +gopkg.in/jcmturner/aescts.v1 v1.0.1/go.mod h1:nsR8qBOg+OucoIW+WMhB3GspUQXq9XorLnQb9XtvcOo= +gopkg.in/jcmturner/dnsutils.v1 v1.0.1 h1:cIuC1OLRGZrld+16ZJvvZxVJeKPsvd5eUIvxfoN5hSM= +gopkg.in/jcmturner/dnsutils.v1 v1.0.1/go.mod h1:m3v+5svpVOhtFAP/wSz+yzh4Mc0Fg7eRhxkJMWSIz9Q= +gopkg.in/jcmturner/goidentity.v3 v3.0.0 h1:1duIyWiTaYvVx3YX2CYtpJbUFd7/UuPYCfgXtQ3VTbI= +gopkg.in/jcmturner/goidentity.v3 v3.0.0/go.mod h1:oG2kH0IvSYNIu80dVAyu/yoefjq1mNfM5bm88whjWx4= +gopkg.in/jcmturner/gokrb5.v6 v6.1.1 h1:n0KFjpbuM5pFMN38/Ay+Br3l91netGSVqHPHEXeWUqk= +gopkg.in/jcmturner/gokrb5.v6 v6.1.1/go.mod h1:NFjHNLrHQiruory+EmqDXCGv6CrjkeYeA+bR9mIfNFk= +gopkg.in/jcmturner/rpc.v1 v1.1.0 h1:QHIUxTX1ISuAv9dD2wJ9HWQVuWDX/Zc0PfeC2tjc4rU= +gopkg.in/jcmturner/rpc.v1 v1.1.0/go.mod h1:YIdkC4XfD6GXbzje11McwsDuOlZQSb9W4vfLvuNnlv8= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gotest.tools/gotestsum v1.8.2 h1:szU3TaSz8wMx/uG+w/A2+4JUPwH903YYaMI9yOOYAyI= +gotest.tools/gotestsum v1.8.2/go.mod h1:6JHCiN6TEjA7Kaz23q1bH0e2Dc3YJjDUZ0DmctFZf+w= +gotest.tools/v3 v3.0.2/go.mod h1:3SzNCllyD9/Y+b5r9JIKQ474KzkZyqLqEfYqMsX94Bk= +gotest.tools/v3 v3.3.0 h1:MfDY1b1/0xN1CyMlQDac0ziEy9zJQd9CXBRRDHw2jJo= +gotest.tools/v3 v3.3.0/go.mod 
h1:Mcr9QNxkg0uMvy/YElmo4SpXgJKWgQvYrT7Kw5RzJ1A= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= diff --git a/internal/scripts/install-golangci-lint.sh b/internal/scripts/install-golangci-lint.sh new file mode 100755 index 0000000..6a84fd8 --- /dev/null +++ b/internal/scripts/install-golangci-lint.sh @@ -0,0 +1,7 @@ +#!/bin/bash +VERSION=$1 +[ -z "${VERSION}" ] && VERSION="v1.55.2" +GOPATH=$(go env GOPATH) +[ -f "${GOPATH}/bin/golangci-lint-${VERSION}" ] && echo "golangci-lint ${VERSION} is already installed" || \ +curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/${VERSION}/install.sh | sh -s -- -b ${GOPATH}/bin ${VERSION} && \ +cp ${GOPATH}/bin/golangci-lint ${GOPATH}/bin/golangci-lint-${VERSION} \ No newline at end of file diff --git a/sqlconnect/async.go b/sqlconnect/async.go new file mode 100644 index 0000000..2e947e5 --- /dev/null +++ b/sqlconnect/async.go @@ -0,0 +1,134 @@ +package sqlconnect + +import ( + "context" + "database/sql" + "encoding/json" + "fmt" + + "github.com/rudderlabs/rudder-go-kit/async" +) + +type JsonQueryDB interface { + JsonRowMapper + QueryContext(ctx context.Context, query string, args ...any) (*sql.Rows, error) +} + +type QueryDB interface { + QueryContext(ctx context.Context, query string, args ...any) (*sql.Rows, error) +} + +// QueryJSONMapAsync executes a query and returns a channel that will receive the results as a map or an error, along with a function that the caller can use to leave the channel early. +// The channel will be closed when the query is done or when the context is canceled. +func QueryJSONMapAsync(ctx context.Context, db JsonQueryDB, query string, params ...any) (ch <-chan ValueOrError[map[string]any], leave func()) { + return QueryAsync[map[string]any](ctx, db, db.JSONRowMapper(), query, params...) 
+} + +// QueryJSONAsync executes a query and returns a channel that will receive the results as json or an error, along with a function that the caller can use to leave the channel early. +// The channel will be closed when the query is done or when the context is canceled. +func QueryJSONAsync(ctx context.Context, db JsonQueryDB, query string, params ...any) (ch <-chan ValueOrError[json.RawMessage], leave func()) { + jsonRowMapper := db.JSONRowMapper() + mapper := func(cols []*sql.ColumnType, row RowScan) (json.RawMessage, error) { + m, err := jsonRowMapper(cols, row) + if err != nil { + return nil, err + } + b, err := json.Marshal(m) + if err != nil { + return nil, fmt.Errorf("marshalling rows to json: %w", err) + } + return b, nil + } + return QueryAsync[json.RawMessage](ctx, db, mapper, query, params...) +} + +// QueryAsync executes a query and returns a channel that will receive the results or an error, along with a function that the caller can use to leave the channel early. +// The channel will be closed when the query is done or when the context is canceled. +func QueryAsync[T any](ctx context.Context, db QueryDB, mapper RowMapper[T], query string, params ...any) (ch <-chan ValueOrError[T], leave func()) { + s := &async.SingleSender[ValueOrError[T]]{} + ctx, ch, leave = s.Begin(ctx) + go func() { + defer s.Close() + rows, err := db.QueryContext(ctx, query, params...) 
+ if err != nil { + s.Send(ValueOrError[T]{Err: fmt.Errorf("executing query: %w", err)}) + return + } + defer func() { _ = rows.Close() }() + cols, err := rows.ColumnTypes() + if err != nil { + s.Send(ValueOrError[T]{Err: fmt.Errorf("getting column types: %w", err)}) + return + } + for rows.Next() { + select { + case <-ctx.Done(): + s.Send(ValueOrError[T]{Err: ctx.Err()}) + return + default: + } + v, err := mapper(cols, rows) + if err != nil { + s.Send(ValueOrError[T]{Err: fmt.Errorf("mapping row: %w", err)}) + return + } + s.Send(ValueOrError[T]{Value: v}) + } + if err := rows.Err(); err != nil { + s.Send(ValueOrError[T]{Err: fmt.Errorf("iterating rows: %w", err)}) + } + }() + return +} + +// ValueOrError represents a value or an error +type ValueOrError[T any] struct { + Value T + Err error +} + +// RowScan is an interface that represents a row scanner +type RowScan interface { + Scan(dest ...any) error +} + +// RowMapper is a function that maps database rows to a value +type RowMapper[T any] func(cols []*sql.ColumnType, row RowScan) (T, error) + +// JSONRowMapper returns a row mapper that scans rows and maps them to [map[string]any] +func JSONRowMapper(valueMapper func(databaseTypeName string, value any) any) RowMapper[map[string]any] { + return func(cols []*sql.ColumnType, row RowScan) (map[string]any, error) { + values := make([]any, len(cols)) + for i := range values { + values[i] = new(NilAny) + } + if err := row.Scan(values...); err != nil { + return nil, fmt.Errorf("scanning row: %w", err) + } + o := map[string]any{} + for i := range values { + v := values[i].(*NilAny) + var val any + if v != nil { + val = v.Value + } + col := cols[i] + o[col.Name()] = valueMapper(col.DatabaseTypeName(), val) + } + return o, nil + } +} + +type NilAny struct { + Value any +} + +func (v *NilAny) Scan(src any) error { + switch src.(type) { + case nil: + v.Value = nil + default: + v.Value = src + } + return nil +} diff --git a/sqlconnect/columnref.go b/sqlconnect/columnref.go 
new file mode 100644 index 0000000..95304a1 --- /dev/null +++ b/sqlconnect/columnref.go @@ -0,0 +1,8 @@ +package sqlconnect + +// ColumnRef provides a reference to a table column +type ColumnRef struct { + Name string `json:"name"` + Type string `json:"type"` + RawType string `json:"rawType"` +} diff --git a/sqlconnect/config/config.go b/sqlconnect/config/config.go new file mode 100644 index 0000000..b282be7 --- /dev/null +++ b/sqlconnect/config/config.go @@ -0,0 +1,19 @@ +package config + +import ( + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/bigquery" + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/databricks" + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/mysql" + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/postgres" + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/snowflake" + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/trino" +) + +type ( + BigQuery = bigquery.Config + Databricks = databricks.Config + Mysql = mysql.Config + Postgres = postgres.Config + Snowflake = snowflake.Config + Trino = trino.Config +) diff --git a/sqlconnect/db.go b/sqlconnect/db.go new file mode 100644 index 0000000..fbe7bed --- /dev/null +++ b/sqlconnect/db.go @@ -0,0 +1,100 @@ +package sqlconnect + +import ( + "context" + "database/sql" + "database/sql/driver" + "errors" + "time" +) + +var ErrDropOldTablePostCopy = errors.New("move table: dropping old table after copying its contents to the new table") + +type DB interface { + sqlDB + // SqlDB returns the underlying *sql.DB + SqlDB() *sql.DB + SchemaAdmin + TableAdmin + JsonRowMapper + Dialect +} + +type sqlDB interface { + Begin() (*sql.Tx, error) + BeginTx(ctx context.Context, opts *sql.TxOptions) (*sql.Tx, error) + Close() error + Conn(ctx context.Context) (*sql.Conn, error) + Driver() driver.Driver + Exec(query string, args ...any) (sql.Result, error) + ExecContext(ctx context.Context, query string, args ...any) (sql.Result, error) + Ping() error + 
PingContext(ctx context.Context) error + Prepare(query string) (*sql.Stmt, error) + PrepareContext(ctx context.Context, query string) (*sql.Stmt, error) + Query(query string, args ...any) (*sql.Rows, error) + QueryContext(ctx context.Context, query string, args ...any) (*sql.Rows, error) + QueryRow(query string, args ...any) *sql.Row + QueryRowContext(ctx context.Context, query string, args ...any) *sql.Row + SetConnMaxIdleTime(d time.Duration) + SetConnMaxLifetime(d time.Duration) + SetMaxIdleConns(n int) + SetMaxOpenConns(n int) + Stats() sql.DBStats +} + +type SchemaAdmin interface { + // CreateSchema creates a schema + CreateSchema(ctx context.Context, schema SchemaRef) error + // GetRudderSchema returns the name of the rudder schema + GetRudderSchema() string + // GetSchemas returns a list of schemas + ListSchemas(ctx context.Context) ([]SchemaRef, error) + // SchemaExists returns true if the schema exists + SchemaExists(ctx context.Context, schemaRef SchemaRef) (bool, error) + // DropSchema drops a schema + DropSchema(ctx context.Context, schema SchemaRef) error +} + +type TableAdmin interface { + // CreateTestTable creates a test table + CreateTestTable(ctx context.Context, relation RelationRef) error + // ListTables returns a list of tables in the given schema + ListTables(ctx context.Context, schema SchemaRef) ([]RelationRef, error) + // ListTablesWithPrefix returns a list of tables in the given schema that have the given prefix + ListTablesWithPrefix(ctx context.Context, schema SchemaRef, prefix string) ([]RelationRef, error) + // TableExists returns true if the table exists + TableExists(ctx context.Context, relation RelationRef) (bool, error) + // ListColumns returns a list of columns for the given table + ListColumns(ctx context.Context, relation RelationRef) ([]ColumnRef, error) + // ListColumnsForSqlQuery returns a list of columns for the given sql query + ListColumnsForSqlQuery(ctx context.Context, sql string) ([]ColumnRef, error) + // 
CountTableRows returns the number of rows in the given table + CountTableRows(ctx context.Context, table RelationRef) (count int, err error) + // DropTable drops a table + DropTable(ctx context.Context, ref RelationRef) error + // TruncateTable truncates a table + TruncateTable(ctx context.Context, ref RelationRef) error + // RenameTable renames a table. It might fall back to using MoveTable if the underlying database does not support renaming tables. + RenameTable(ctx context.Context, oldRef, newRef RelationRef) error + // MoveTable creates a new table by copying the old table's contents to it and then drops the old table. Returns [ErrDropOldTablePostCopy] if the old table could not be dropped after copy. + MoveTable(ctx context.Context, oldRef, newRef RelationRef) error + // CreateTableFromQuery creates a table from the results of a query + CreateTableFromQuery(ctx context.Context, table RelationRef, query string) error + // GetRowCountForQuery returns the number of rows returned by the query + GetRowCountForQuery(ctx context.Context, query string, params ...any) (int, error) +} + +type JsonRowMapper interface { + // JSONRowMapper returns a row mapper that maps rows to map[string]any + JSONRowMapper() RowMapper[map[string]any] +} + +type Dialect interface { + // QuoteTable quotes a table name + QuoteTable(table RelationRef) string + // QuoteIdentifier quotes an identifier, e.g. a column name + QuoteIdentifier(name string) string + // FormatTableName formats a table name, typically by lower or upper casing it, depending on the database + FormatTableName(name string) string +} diff --git a/sqlconnect/db_factory.go b/sqlconnect/db_factory.go new file mode 100644 index 0000000..351c9a2 --- /dev/null +++ b/sqlconnect/db_factory.go @@ -0,0 +1,23 @@ +package sqlconnect + +import ( + "encoding/json" + "fmt" +) + +// NewDB creates a new database client for the provided name. 
+func NewDB(name string, credentialsJSON json.RawMessage) (DB, error) { + factory, ok := dbfactories[name] + if !ok { + return nil, fmt.Errorf("unknown client factory: %s", name) + } + return factory(credentialsJSON) +} + +type DBFactory func(credentialsJSON json.RawMessage) (DB, error) + +var dbfactories = map[string]DBFactory{} + +func RegisterDBFactory(name string, factory DBFactory) { + dbfactories[name] = factory +} diff --git a/sqlconnect/db_factory_test.go b/sqlconnect/db_factory_test.go new file mode 100644 index 0000000..3148514 --- /dev/null +++ b/sqlconnect/db_factory_test.go @@ -0,0 +1,14 @@ +package sqlconnect_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect" +) + +func TestNewDB(t *testing.T) { + _, err := sqlconnect.NewDB("invalid", []byte{}) + require.Error(t, err, "should return error for invalid db name") +} diff --git a/sqlconnect/internal/base/db.go b/sqlconnect/internal/base/db.go new file mode 100644 index 0000000..8272507 --- /dev/null +++ b/sqlconnect/internal/base/db.go @@ -0,0 +1,129 @@ +package base + +import ( + "database/sql" + "fmt" + + "github.com/samber/lo" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect" +) + +func NewDB(db *sql.DB, rudderSchema string, opts ...Option) *DB { + d := &DB{ + DB: db, + Dialect: dialect{}, + columnTypeMapper: func(c ColumnType) string { + return c.DatabaseTypeName() + }, + jsonRowMapper: func(databaseTypeName string, value any) any { + return value + }, + rudderSchema: rudderSchema, + sqlCommands: SQLCommands{ + CreateSchema: func(schema string) string { return fmt.Sprintf("CREATE SCHEMA IF NOT EXISTS %[1]s", schema) }, + ListSchemas: func() (string, string) { + return "SELECT schema_name FROM information_schema.schemata", "schema_name" + }, + SchemaExists: func(schema string) string { + return fmt.Sprintf("SELECT schema_name FROM information_schema.schemata where schema_name = '%[1]s'", schema) + }, + DropSchema: func(schema 
string) string { return fmt.Sprintf("DROP SCHEMA %[1]s CASCADE", schema) }, + CreateTestTable: func(table string) string { + return fmt.Sprintf("CREATE TABLE IF NOT EXISTS %[1]s (c1 INT, c2 VARCHAR(255))", table) + }, + ListTables: func(schema string) []lo.Tuple2[string, string] { + return []lo.Tuple2[string, string]{ + {A: fmt.Sprintf("SELECT table_name FROM information_schema.tables WHERE table_schema = '%[1]s'", schema), B: "table_name"}, + } + }, + ListTablesWithPrefix: func(schema, prefix string) []lo.Tuple2[string, string] { + return []lo.Tuple2[string, string]{ + {A: fmt.Sprintf("SELECT table_name FROM information_schema.tables WHERE table_schema='%[1]s' AND table_name LIKE '%[2]s'", schema, prefix+"%"), B: "table_name"}, + } + }, + TableExists: func(schema, table string) string { + return fmt.Sprintf("SELECT table_name FROM information_schema.tables WHERE table_schema='%[1]s' and table_name = '%[2]s'", schema, table) + }, + ListColumns: func(schema, table string) (string, string, string) { + return fmt.Sprintf("SELECT column_name, data_type FROM information_schema.columns WHERE table_schema = '%[1]s' AND table_name = '%[2]s'", schema, table), "column_name", "data_type" + }, + CountTableRows: func(table string) string { return fmt.Sprintf("SELECT COUNT(*) FROM %[1]s", table) }, + DropTable: func(table string) string { return fmt.Sprintf("DROP TABLE IF EXISTS %[1]s", table) }, + TruncateTable: func(table string) string { return fmt.Sprintf("TRUNCATE TABLE %[1]s", table) }, + RenameTable: func(schema, oldName, newName string) string { + return fmt.Sprintf("ALTER TABLE %[1]s.%[2]s RENAME TO %[3]s", schema, oldName, newName) + }, + MoveTable: func(schema, oldName, newName string) string { + return fmt.Sprintf("CREATE TABLE %[1]s.%[3]s AS SELECT * FROM %[1]s.%[2]s", schema, oldName, newName) + }, + }, + } + for _, opt := range opts { + opt(d) + } + return d +} + +type DB struct { + *sql.DB + sqlconnect.Dialect + + rudderSchema string + columnTypeMapper 
func(ColumnType) string // map from database type to rudder type + jsonRowMapper func(databaseTypeName string, value any) any + sqlCommands SQLCommands +} + +type ColumnType interface { + DatabaseTypeName() string + DecimalSize() (precision, scale int64, ok bool) +} + +type colRefTypeAdapter struct { + sqlconnect.ColumnRef +} + +func (c colRefTypeAdapter) DatabaseTypeName() string { + return c.RawType +} + +func (c colRefTypeAdapter) DecimalSize() (precision, scale int64, ok bool) { + return 0, 0, false +} + +// SqlDB returns the underlying *sql.DB +func (db *DB) SqlDB() *sql.DB { + return db.DB +} + +type SQLCommands struct { + // Provides the SQL command to create a schema + CreateSchema func(schema string) string + // Provides the SQL command to list all schemas + ListSchemas func() (sql, columnName string) + // Provides the SQL command to check if a schema exists + SchemaExists func(schema string) string + // Provides the SQL command to drop a schema + DropSchema func(schema string) string + // Provides the SQL command to create a test table + CreateTestTable func(table string) string + // Provides the SQL command(s) to list all tables in a schema along with the column name that contains the table name in the result set + ListTables func(schema string) (sqlAndColumnNamePairs []lo.Tuple2[string, string]) + // Provides the SQL command(s) to list all tables in a schema with a prefix along with the column name that contains the table name in the result set + ListTablesWithPrefix func(schema, prefix string) []lo.Tuple2[string, string] + // Provides the SQL command to check if a table exists + TableExists func(schema, table string) string + // Provides the SQL command to list all columns in a table along with the column names in the result set that point to the name and type + ListColumns func(schema, table string) (sql, nameCol, typeCol string) + // Provides the SQL command to count the rows in a table + CountTableRows func(table string) string + // Provides the SQL 
command to drop a table + DropTable func(table string) string + // Provides the SQL command to truncate a table + TruncateTable func(table string) string + // Provides the SQL command to rename a table + RenameTable func(schema, oldName, newName string) string + // Provides the SQL command to move a table + MoveTable func(schema, oldName, newName string) string +} diff --git a/sqlconnect/internal/base/dbopts.go b/sqlconnect/internal/base/dbopts.go new file mode 100644 index 0000000..3614b36 --- /dev/null +++ b/sqlconnect/internal/base/dbopts.go @@ -0,0 +1,52 @@ +package base + +import ( + "strings" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect" +) + +type Option func(*DB) + +// WithColumnTypeMappings sets the column type mappings for the client +func WithColumnTypeMappings(columnTypeMappings map[string]string) Option { + return func(db *DB) { + db.columnTypeMapper = func(c ColumnType) string { + if mappedType, ok := columnTypeMappings[strings.ToLower(c.DatabaseTypeName())]; ok { + return mappedType + } + if mappedType, ok := columnTypeMappings[strings.ToUpper(c.DatabaseTypeName())]; ok { + return mappedType + } + return c.DatabaseTypeName() + } + } +} + +// WithColumnTypeMapper sets the column type mapper for the client +func WithColumnTypeMapper(columnTypeMapper func(ColumnType) string) Option { + return func(db *DB) { + db.columnTypeMapper = columnTypeMapper + } +} + +// WithJsonRowMapper sets the json row mapper for the client +func WithJsonRowMapper(jsonRowMapper func(string, any) any) Option { + return func(db *DB) { + db.jsonRowMapper = jsonRowMapper + } +} + +// WithDialect sets the dialect for the client +func WithDialect(dialect sqlconnect.Dialect) Option { + return func(db *DB) { + db.Dialect = dialect + } +} + +// WithSQLCommandsOverride allows for overriding some of the sql commands that the client uses +func WithSQLCommandsOverride(override func(defaultCommands SQLCommands) SQLCommands) Option { + return func(db *DB) { + db.sqlCommands = 
override(db.sqlCommands) + } +} diff --git a/sqlconnect/internal/base/dialect.go b/sqlconnect/internal/base/dialect.go new file mode 100644 index 0000000..eaf3468 --- /dev/null +++ b/sqlconnect/internal/base/dialect.go @@ -0,0 +1,28 @@ +package base + +import ( + "fmt" + "strings" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect" +) + +type dialect struct{} + +// QuoteTable quotes a table name +func (d dialect) QuoteTable(table sqlconnect.RelationRef) string { + if table.Schema != "" { + return d.QuoteIdentifier(table.Schema) + "." + d.QuoteIdentifier(table.Name) + } + return d.QuoteIdentifier(table.Name) +} + +// QuoteIdentifier quotes an identifier, e.g. a column name +func (d dialect) QuoteIdentifier(name string) string { + return fmt.Sprintf(`"%s"`, name) +} + +// FormatTableName formats a table name, typically by lower or upper casing it, depending on the database +func (d dialect) FormatTableName(name string) string { + return strings.ToLower(name) +} diff --git a/sqlconnect/internal/base/dialect_test.go b/sqlconnect/internal/base/dialect_test.go new file mode 100644 index 0000000..c9a97af --- /dev/null +++ b/sqlconnect/internal/base/dialect_test.go @@ -0,0 +1,30 @@ +package base + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect" +) + +func TestDialect(t *testing.T) { + var d dialect + t.Run("format table", func(t *testing.T) { + formatted := d.FormatTableName("TaBle") + require.Equal(t, "table", formatted, "table name should be lowercased") + }) + + t.Run("quote identifier", func(t *testing.T) { + quoted := d.QuoteIdentifier("column") + require.Equal(t, `"column"`, quoted, "column name should be quoted with double quotes") + }) + + t.Run("quote table", func(t *testing.T) { + quoted := d.QuoteTable(sqlconnect.NewRelationRef("table")) + require.Equal(t, `"table"`, quoted, "table name should be quoted with double quotes") + + quoted = d.QuoteTable(sqlconnect.NewRelationRef("table", 
sqlconnect.WithSchema("schema"))) + require.Equal(t, `"schema"."table"`, quoted, "schema and table name should be quoted with double quotes") + }) +} diff --git a/sqlconnect/internal/base/mapper.go b/sqlconnect/internal/base/mapper.go new file mode 100644 index 0000000..8b9a04b --- /dev/null +++ b/sqlconnect/internal/base/mapper.go @@ -0,0 +1,10 @@ +package base + +import ( + "github.com/rudderlabs/sqlconnect-go/sqlconnect" +) + +// JSONRowMapper returns a row mapper that maps scanned rows to [map[string]any] +func (db *DB) JSONRowMapper() sqlconnect.RowMapper[map[string]any] { + return sqlconnect.JSONRowMapper(db.jsonRowMapper) +} diff --git a/sqlconnect/internal/base/schemaadmin.go b/sqlconnect/internal/base/schemaadmin.go new file mode 100644 index 0000000..25ec9d2 --- /dev/null +++ b/sqlconnect/internal/base/schemaadmin.go @@ -0,0 +1,92 @@ +package base + +import ( + "context" + "fmt" + "strings" + + "github.com/samber/lo" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect" +) + +// GetRudderSchema returns the name of the rudder schema +func (db *DB) GetRudderSchema() string { + return db.rudderSchema +} + +// CreateSchema creates a schema +func (db *DB) CreateSchema(ctx context.Context, schema sqlconnect.SchemaRef) error { + if _, err := db.ExecContext(ctx, db.sqlCommands.CreateSchema(db.QuoteIdentifier(schema.Name))); err != nil { + return fmt.Errorf("creating schema %s: %w", schema, err) + } + return nil +} + +// ListSchemas returns a list of schemas +func (db *DB) ListSchemas(ctx context.Context) ([]sqlconnect.SchemaRef, error) { + var res []sqlconnect.SchemaRef + stmt, colName := db.sqlCommands.ListSchemas() + rows, err := db.QueryContext(ctx, stmt) + if err != nil { + return nil, fmt.Errorf("querying list schemas: %w", err) + } + defer func() { _ = rows.Close() }() + + cols, err := rows.Columns() + if err != nil { + return nil, fmt.Errorf("getting columns in list schemas: %w", err) + } + cols = lo.Map(cols, func(col string, _ int) string { return 
strings.ToLower(col) }) + var schema sqlconnect.SchemaRef + scanValues := make([]any, len(cols)) + if len(cols) == 1 { + scanValues[0] = &schema.Name + } else { + tableNameColIdx := lo.IndexOf(cols, strings.ToLower(colName)) + if tableNameColIdx == -1 { + return nil, fmt.Errorf("column %s not found in result set: %+v", colName, cols) + } + var otherCol sqlconnect.NilAny + for i := 0; i < len(cols); i++ { + if i == tableNameColIdx { + scanValues[i] = &schema.Name + } else { + scanValues[i] = &otherCol + } + } + } + for rows.Next() { + err = rows.Scan(scanValues...) + if err != nil { + return nil, fmt.Errorf("scanning list schemas: %w", err) + } + res = append(res, schema) + } + if err := rows.Err(); err != nil { + return nil, fmt.Errorf("iterating list schemas: %w", err) + } + return res, nil +} + +// SchemaExists returns true if the schema exists +func (db *DB) SchemaExists(ctx context.Context, schemaRef sqlconnect.SchemaRef) (bool, error) { + rows, err := db.QueryContext(ctx, db.sqlCommands.SchemaExists(schemaRef.Name)) + if err != nil { + return false, fmt.Errorf("querying schema exists: %w", err) + } + defer func() { _ = rows.Close() }() + exists := rows.Next() + if err := rows.Err(); err != nil { + return false, fmt.Errorf("iterating schema exists: %w", err) + } + return exists, nil +} + +// DropSchema drops a schema +func (db *DB) DropSchema(ctx context.Context, schemaRef sqlconnect.SchemaRef) error { + if _, err := db.ExecContext(ctx, db.sqlCommands.DropSchema(db.QuoteIdentifier(schemaRef.Name))); err != nil { + return fmt.Errorf("dropping schema: %w", err) + } + return nil +} diff --git a/sqlconnect/internal/base/tableadmin.go b/sqlconnect/internal/base/tableadmin.go new file mode 100644 index 0000000..231b7ab --- /dev/null +++ b/sqlconnect/internal/base/tableadmin.go @@ -0,0 +1,267 @@ +package base + +import ( + "context" + "fmt" + "strings" + + "github.com/samber/lo" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect" +) + +// CreateTestTable creates a 
test table +func (db *DB) CreateTestTable(ctx context.Context, table sqlconnect.RelationRef) error { + _, err := db.ExecContext(ctx, db.sqlCommands.CreateTestTable(db.QuoteTable(table))) + return err +} + +// ListTables returns a list of tables in the given schema +func (db *DB) ListTables(ctx context.Context, schema sqlconnect.SchemaRef) ([]sqlconnect.RelationRef, error) { + var res []sqlconnect.RelationRef + for _, tuple := range db.sqlCommands.ListTables(schema.Name) { + stmt := tuple.A + colName := tuple.B + rows, err := db.QueryContext(ctx, stmt) + if err != nil { + return nil, fmt.Errorf("querying list tables for schema %s: %w", schema, err) + } + defer func() { _ = rows.Close() }() + cols, err := rows.Columns() + if err != nil { + return nil, fmt.Errorf("getting columns in list tables for schema %s: %w", schema, err) + } + cols = lo.Map(cols, func(col string, _ int) string { return strings.ToLower(col) }) + var name string + scanValues := make([]any, len(cols)) + if len(cols) == 1 { + scanValues[0] = &name + } else { + tableNameColIdx := lo.IndexOf(cols, strings.ToLower(colName)) + if tableNameColIdx == -1 { + return nil, fmt.Errorf("column %s not found in result set: %+v", colName, cols) + } + var otherCol sqlconnect.NilAny + for i := 0; i < len(cols); i++ { + if i == tableNameColIdx { + scanValues[i] = &name + } else { + scanValues[i] = &otherCol + } + } + } + for rows.Next() { + err = rows.Scan(scanValues...) 
+ if err != nil { + return nil, fmt.Errorf("scanning list tables: %w", err) + } + res = append(res, sqlconnect.NewRelationRef(name, sqlconnect.WithSchema(schema.Name))) + } + if err := rows.Err(); err != nil { + return nil, fmt.Errorf("iterating list tables: %w", err) + } + } + + return res, nil +} + +// ListTablesWithPrefix returns a list of tables in the given schema that have the given prefix +func (db *DB) ListTablesWithPrefix(ctx context.Context, schema sqlconnect.SchemaRef, prefix string) ([]sqlconnect.RelationRef, error) { + var res []sqlconnect.RelationRef + for _, tuple := range db.sqlCommands.ListTablesWithPrefix(schema.Name, prefix) { + stmt := tuple.A + colName := tuple.B + rows, err := db.QueryContext(ctx, stmt) + if err != nil { + return nil, fmt.Errorf("querying list tables for schema %s with prefix %s: %w", schema, prefix, err) + } + defer func() { _ = rows.Close() }() + cols, err := rows.Columns() + if err != nil { + return nil, fmt.Errorf("getting columns in list tables for schema %s with prefix %s: %w", schema, prefix, err) + } + cols = lo.Map(cols, func(col string, _ int) string { return strings.ToLower(col) }) + var name string + scanValues := make([]any, len(cols)) + if len(cols) == 1 { + scanValues[0] = &name + } else { + tableNameColIdx := lo.IndexOf(cols, strings.ToLower(colName)) + if tableNameColIdx == -1 { + return nil, fmt.Errorf("column %s not found in result set: %+v", colName, cols) + } + var otherCol sqlconnect.NilAny + for i := 0; i < len(cols); i++ { + if i == tableNameColIdx { + scanValues[i] = &name + } else { + scanValues[i] = &otherCol + } + } + } + for rows.Next() { + if err := rows.Scan(scanValues...); err != nil { + return nil, fmt.Errorf("scanning list tables for schema %s with prefix %s: %w", schema, prefix, err) + } + res = append(res, sqlconnect.NewRelationRef(name, sqlconnect.WithSchema(schema.Name))) + } + // rows.Err will report the last error encountered by rows.Scan. 
+ if err := rows.Err(); err != nil { + return nil, fmt.Errorf("iterating list tables for schema %s with prefix %s: %w", schema, prefix, err) + } + } + return res, nil +} + +// TableExists returns true if the table exists +func (db *DB) TableExists(ctx context.Context, relation sqlconnect.RelationRef) (bool, error) { + stmt := db.sqlCommands.TableExists(relation.Schema, relation.Name) + rows, err := db.QueryContext(ctx, stmt) + if err != nil { + return false, fmt.Errorf("querying table %s exists: %w", relation, err) + } + defer func() { _ = rows.Close() }() + if rows.Next() { + return true, nil + } + if err := rows.Err(); err != nil { + return false, fmt.Errorf("iterating table %s exists: %w", relation, err) + } + return false, nil +} + +// ListColumns returns a list of columns for the given table +func (db *DB) ListColumns(ctx context.Context, relation sqlconnect.RelationRef) ([]sqlconnect.ColumnRef, error) { + var res []sqlconnect.ColumnRef + stmt, nameCol, typeCol := db.sqlCommands.ListColumns(relation.Schema, relation.Name) + columns, err := db.QueryContext(ctx, stmt) + if err != nil { + return nil, fmt.Errorf("querying list columns for %s: %w", relation.String(), err) + } + defer func() { _ = columns.Close() }() + cols, err := columns.Columns() + if err != nil { + return nil, fmt.Errorf("getting columns in list columns for %s: %w", relation.String(), err) + } + cols = lo.Map(cols, func(col string, _ int) string { return strings.ToLower(col) }) + + var column sqlconnect.ColumnRef + scanValues := make([]any, len(cols)) + nameColIdx := lo.IndexOf(cols, strings.ToLower(nameCol)) + if nameColIdx == -1 { + return nil, fmt.Errorf("column %s not found in result set: %+v", nameCol, cols) + } + typeColIdx := lo.IndexOf(cols, strings.ToLower(typeCol)) + if typeColIdx == -1 { + return nil, fmt.Errorf("column %s not found in result set: %+v", typeCol, cols) + } + var otherCol sqlconnect.NilAny + for i := 0; i < len(cols); i++ { + if i == nameColIdx { + scanValues[i] = 
&column.Name + } else if i == typeColIdx { + scanValues[i] = &column.RawType + } else { + scanValues[i] = &otherCol + } + } + + for columns.Next() { + if err := columns.Scan(scanValues...); err != nil { + return nil, fmt.Errorf("scanning list columns for %s: %w", relation.String(), err) + } + column.Type = db.columnTypeMapper(colRefTypeAdapter{column}) + res = append(res, column) + } + + if err := columns.Err(); err != nil { + return nil, fmt.Errorf("iterating list columns for %s: %w", relation.String(), err) + } + return res, nil +} + +// ListColumnsForSqlQuery returns a list of columns for the given sql query +func (db *DB) ListColumnsForSqlQuery(ctx context.Context, sql string) ([]sqlconnect.ColumnRef, error) { + var res []sqlconnect.ColumnRef + rows, err := db.DB.QueryContext(ctx, sql) + if err != nil { + return nil, fmt.Errorf("querying list columns for sql query: %w", err) + } + defer func() { _ = rows.Close() }() + + colTypes, err := rows.ColumnTypes() + if err != nil { + return nil, fmt.Errorf("getting column information in list columns for sql query: %w", err) + } + for _, col := range colTypes { + res = append(res, sqlconnect.ColumnRef{ + Name: col.Name(), + Type: db.columnTypeMapper(col), + RawType: col.DatabaseTypeName(), + }) + } + return res, nil +} + +// CountTableRows returns the number of rows in the given table +func (c *DB) CountTableRows(ctx context.Context, relation sqlconnect.RelationRef) (int, error) { + var count int + if err := c.QueryRowContext(ctx, c.sqlCommands.CountTableRows(c.QuoteTable(relation))).Scan(&count); err != nil { + return 0, fmt.Errorf("counting table rows for %s: %w", relation.String(), err) + } + return count, nil +} + +// DropTable drops a table +func (db *DB) DropTable(ctx context.Context, ref sqlconnect.RelationRef) error { + if _, err := db.ExecContext(ctx, db.sqlCommands.DropTable(db.QuoteTable(ref))); err != nil { + return fmt.Errorf("dropping table %s: %w", ref.String(), err) + } + return nil +} + +// TruncateTable 
truncates a table +func (db *DB) TruncateTable(ctx context.Context, ref sqlconnect.RelationRef) error { + if _, err := db.ExecContext(ctx, db.sqlCommands.TruncateTable(db.QuoteTable(ref))); err != nil { + return fmt.Errorf("truncating table %s: %w", ref.String(), err) + } + return nil +} + +// RenameTable renames a table +func (db *DB) RenameTable(ctx context.Context, oldRef, newRef sqlconnect.RelationRef) error { + if oldRef.Schema != newRef.Schema { + return fmt.Errorf("moving table to another schema not supported, oldRef: %s newRef: %s", oldRef, newRef) + } + if _, err := db.ExecContext(ctx, db.sqlCommands.RenameTable(db.QuoteIdentifier(oldRef.Schema), db.QuoteIdentifier(oldRef.Name), db.QuoteIdentifier(newRef.Name))); err != nil { + return fmt.Errorf("renaming table %s to %s: %w", oldRef.String(), newRef.String(), err) + } + return nil +} + +// MoveTable copies the old table's contents to the new table and drops the old table. Returns [ErrDropOldTablePostCopy] if the old table could not be dropped after the copy. 
+func (db *DB) MoveTable(ctx context.Context, oldRef, newRef sqlconnect.RelationRef) error { + if oldRef.Schema != newRef.Schema { + return fmt.Errorf("moving table to another schema not supported, oldRef: %s newRef: %s", oldRef, newRef) + } + if _, err := db.ExecContext(ctx, db.sqlCommands.MoveTable(db.QuoteIdentifier(oldRef.Schema), db.QuoteIdentifier(oldRef.Name), db.QuoteIdentifier(newRef.Name))); err != nil { + return fmt.Errorf("copying table %s contents to %s: %w", oldRef.String(), newRef.String(), err) + } + if err := db.DropTable(ctx, oldRef); err != nil { + return sqlconnect.ErrDropOldTablePostCopy + } + return nil +} + +// CreateTableFromQuery creates a table from the results of a query +func (db *DB) CreateTableFromQuery(ctx context.Context, table sqlconnect.RelationRef, query string) error { + _, err := db.ExecContext(ctx, fmt.Sprintf(`CREATE TABLE %[1]s as (%[2]s)`, db.QuoteTable(table), query)) + return err +} + +// GetRowCountForQuery returns the number of rows returned by the query +func (db *DB) GetRowCountForQuery(ctx context.Context, query string, params ...any) (int, error) { + var count int + err := db.QueryRowContext(ctx, query, params...).Scan(&count) + return count, err +} diff --git a/sqlconnect/internal/bigquery/config.go b/sqlconnect/internal/bigquery/config.go new file mode 100644 index 0000000..27463c0 --- /dev/null +++ b/sqlconnect/internal/bigquery/config.go @@ -0,0 +1,19 @@ +package bigquery + +import ( + "encoding/json" +) + +type Config struct { + ProjectID string `json:"project"` + CredentialsJSON string `json:"credentials"` + + // RudderSchema is used to override the default rudder schema name during tests + RudderSchema string `json:"rudderSchema"` + UseLegacyMappings bool `json:"useLegacyMappings"` +} + +// Parse parses the given JSON into the config +func (c *Config) Parse(configJSON json.RawMessage) error { + return json.Unmarshal(configJSON, c) +} diff --git a/sqlconnect/internal/bigquery/db.go 
b/sqlconnect/internal/bigquery/db.go new file mode 100644 index 0000000..2d2708b --- /dev/null +++ b/sqlconnect/internal/bigquery/db.go @@ -0,0 +1,108 @@ +package bigquery + +import ( + "context" + "database/sql" + "encoding/json" + "fmt" + + "cloud.google.com/go/bigquery" + "github.com/samber/lo" + "google.golang.org/api/option" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect" + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/base" + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/bigquery/driver" +) + +const ( + DatabaseType = "bigquery" + defaultRudderSchema = "rudderstack_" +) + +// NewDB creates a new client +func NewDB(configJSON json.RawMessage) (*DB, error) { + var config Config + err := config.Parse(configJSON) + if err != nil { + return nil, err + } + + db := sql.OpenDB(driver.NewConnector(config.ProjectID, option.WithCredentialsJSON([]byte(config.CredentialsJSON)))) + + return &DB{ + DB: base.NewDB( + db, + lo.Ternary(config.RudderSchema != "", config.RudderSchema, defaultRudderSchema), + base.WithDialect(dialect{}), + base.WithColumnTypeMapper(getColumnTypeMapper(config)), + base.WithJsonRowMapper(getJonRowMapper(config)), + base.WithSQLCommandsOverride(func(cmds base.SQLCommands) base.SQLCommands { + cmds.CreateTestTable = func(table string) string { + return fmt.Sprintf("CREATE TABLE IF NOT EXISTS %[1]s (c1 INT, c2 STRING)", table) + } + cmds.ListTables = func(schema string) []lo.Tuple2[string, string] { + return []lo.Tuple2[string, string]{ + {A: fmt.Sprintf("SELECT table_name FROM `%[1]s`.INFORMATION_SCHEMA.TABLES", schema), B: "table_name"}, + } + } + cmds.ListTablesWithPrefix = func(schema, prefix string) []lo.Tuple2[string, string] { + return []lo.Tuple2[string, string]{ + {A: fmt.Sprintf("SELECT table_name FROM `%[1]s`.INFORMATION_SCHEMA.TABLES WHERE table_name LIKE '%[2]s'", schema, prefix+"%"), B: "table_name"}, + } + } + cmds.TableExists = func(schema, table string) string { + return fmt.Sprintf("SELECT table_name FROM 
`%[1]s`.INFORMATION_SCHEMA.TABLES WHERE table_name = '%[2]s'", schema, table) + } + cmds.ListColumns = func(schema, table string) (string, string, string) { + return fmt.Sprintf("SELECT column_name, data_type FROM `%[1]s`.INFORMATION_SCHEMA.COLUMNS WHERE table_name = '%[2]s'", schema, table), "column_name", "data_type" + } + + return cmds + }), + ), + }, nil +} + +func init() { + sqlconnect.RegisterDBFactory(DatabaseType, func(credentialsJSON json.RawMessage) (sqlconnect.DB, error) { + return NewDB(credentialsJSON) + }) +} + +type DB struct { + *base.DB +} + +// WithBigqueryClient runs the provided function by providing access to a native bigquery client, the underlying client that is used by the bigquery driver +func (db *DB) WithBigqueryClient(ctx context.Context, f func(*bigquery.Client) error) error { + sqlconn, err := db.Conn(ctx) + if err != nil { + return err + } + defer func() { _ = sqlconn.Close() }() + return sqlconn.Raw(func(driverConn any) error { + if c, ok := driverConn.(bqclient); ok { + return f(c.BigqueryClient()) + } + return fmt.Errorf("invalid driver connection") + }) +} + +type bqclient interface { + BigqueryClient() *bigquery.Client +} + +func getColumnTypeMapper(config Config) func(base.ColumnType) string { + if config.UseLegacyMappings { + return legacyColumnTypeMapper + } + return columnTypeMapper +} + +func getJonRowMapper(config Config) func(databaseTypeName string, value any) any { + if config.UseLegacyMappings { + return legacyJsonRowMapper + } + return jsonRowMapper +} diff --git a/sqlconnect/internal/bigquery/dialect.go b/sqlconnect/internal/bigquery/dialect.go new file mode 100644 index 0000000..71c9fde --- /dev/null +++ b/sqlconnect/internal/bigquery/dialect.go @@ -0,0 +1,27 @@ +package bigquery + +import ( + "strings" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect" +) + +type dialect struct{} + +// QuoteTable quotes a table name +func (d dialect) QuoteTable(table sqlconnect.RelationRef) string { + if table.Schema != "" { + 
return d.QuoteIdentifier(table.Schema + "." + table.Name) + } + return d.QuoteIdentifier(table.Name) +} + +// QuoteIdentifier quotes an identifier, e.g. a column name +func (d dialect) QuoteIdentifier(name string) string { + return "`" + name + "`" +} + +// FormatTableName formats a table name, typically by lower or upper casing it, depending on the database +func (d dialect) FormatTableName(name string) string { + return strings.ToLower(name) +} diff --git a/sqlconnect/internal/bigquery/dialect_test.go b/sqlconnect/internal/bigquery/dialect_test.go new file mode 100644 index 0000000..3e245e9 --- /dev/null +++ b/sqlconnect/internal/bigquery/dialect_test.go @@ -0,0 +1,30 @@ +package bigquery + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect" +) + +func TestDialect(t *testing.T) { + var d dialect + t.Run("format table", func(t *testing.T) { + formatted := d.FormatTableName("TaBle") + require.Equal(t, "table", formatted, "table name should be lowercased") + }) + + t.Run("quote identifier", func(t *testing.T) { + quoted := d.QuoteIdentifier("column") + require.Equal(t, "`column`", quoted, "column name should be quoted with backticks") + }) + + t.Run("quote table", func(t *testing.T) { + quoted := d.QuoteTable(sqlconnect.NewRelationRef("table")) + require.Equal(t, "`table`", quoted, "table name should be quoted with backticks") + + quoted = d.QuoteTable(sqlconnect.NewRelationRef("table", sqlconnect.WithSchema("schema"))) + require.Equal(t, "`schema.table`", quoted, "schema and table name should be quoted with backticks") + }) +} diff --git a/sqlconnect/internal/bigquery/driver/columns.go b/sqlconnect/internal/bigquery/driver/columns.go new file mode 100644 index 0000000..d6e8c0c --- /dev/null +++ b/sqlconnect/internal/bigquery/driver/columns.go @@ -0,0 +1,71 @@ +package driver + +import ( + "database/sql/driver" + + "cloud.google.com/go/bigquery" +) + +type bigQuerySchema interface { + ColumnNames() 
[]string + ConvertColumnValue(index int, value bigquery.Value) (driver.Value, error) + ColumnTypeDatabaseTypeName(index int) string +} + +type bigQueryColumns struct { + names []string + columns []bigQueryColumn +} + +func (columns bigQueryColumns) ConvertColumnValue(index int, value bigquery.Value) (driver.Value, error) { + if index > -1 && len(columns.columns) > index { + column := columns.columns[index] + return column.ConvertValue(value) + } + + return value, nil +} + +func (columns bigQueryColumns) ColumnNames() []string { + return columns.names +} + +func (columns bigQueryColumns) ColumnTypeDatabaseTypeName(index int) string { + if index > -1 && len(columns.columns) > index { + column := columns.columns[index] + if column.FieldSchema.Repeated { + return "ARRAY" + } + return string(column.FieldSchema.Type) + } + + return "" +} + +type bigQueryColumn struct { + Name string + FieldSchema *bigquery.FieldSchema +} + +func (column bigQueryColumn) ConvertValue(value bigquery.Value) (driver.Value, error) { + return value, nil +} + +func createBigQuerySchema(schema bigquery.Schema) bigQuerySchema { + var names []string + var columns []bigQueryColumn + for _, column := range schema { + + name := column.Name + + names = append(names, name) + columns = append(columns, bigQueryColumn{ + Name: name, + FieldSchema: column, + }) + } + return &bigQueryColumns{ + names, + columns, + } +} diff --git a/sqlconnect/internal/bigquery/driver/connection.go b/sqlconnect/internal/bigquery/driver/connection.go new file mode 100644 index 0000000..3b1764a --- /dev/null +++ b/sqlconnect/internal/bigquery/driver/connection.go @@ -0,0 +1,78 @@ +package driver + +import ( + "context" + "database/sql/driver" + "errors" + "fmt" + + "cloud.google.com/go/bigquery" + "google.golang.org/api/iterator" +) + +type bigQueryConnection struct { + ctx context.Context + client *bigquery.Client + closed bool + bad bool +} + +func (connection *bigQueryConnection) GetContext() context.Context { + return 
connection.ctx +} + +func (connection *bigQueryConnection) Ping(ctx context.Context) error { + datasets := connection.client.Datasets(ctx) + if _, err := datasets.Next(); err != nil && !errors.Is(err, iterator.Done) { + return err + } + return nil +} + +func (connection *bigQueryConnection) QueryContext(ctx context.Context, query string, args []driver.NamedValue) (driver.Rows, error) { + statement := &bigQueryStatement{connection, query} + return statement.QueryContext(ctx, args) +} + +func (connection *bigQueryConnection) Query(query string, args []driver.Value) (driver.Rows, error) { + return nil, driver.ErrSkip +} + +func (connection *bigQueryConnection) Prepare(query string) (driver.Stmt, error) { + statement := &bigQueryStatement{connection, query} + + return statement, nil +} + +func (connection *bigQueryConnection) Close() error { + if connection.closed { + return nil + } + if connection.bad { + return driver.ErrBadConn + } + connection.closed = true + return connection.client.Close() +} + +func (connection *bigQueryConnection) Begin() (driver.Tx, error) { + return nil, fmt.Errorf("bigquery: transactions are not supported") +} + +func (connection *bigQueryConnection) query(query string) (*bigquery.Query, error) { + return connection.client.Query(query), nil +} + +func (connection *bigQueryConnection) ExecContext(ctx context.Context, query string, args []driver.NamedValue) (driver.Result, error) { + statement := &bigQueryStatement{connection, query} + return statement.ExecContext(ctx, args) +} + +func (bigQueryConnection) CheckNamedValue(*driver.NamedValue) error { + return nil +} + +// BigqueryClient returns the underlying bigquery.Client (for those hard to reach places...) 
+func (connection *bigQueryConnection) BigqueryClient() *bigquery.Client { + return connection.client +} diff --git a/sqlconnect/internal/bigquery/driver/connector.go b/sqlconnect/internal/bigquery/driver/connector.go new file mode 100644 index 0000000..a45b7ba --- /dev/null +++ b/sqlconnect/internal/bigquery/driver/connector.go @@ -0,0 +1,40 @@ +package driver + +import ( + "context" + "database/sql/driver" + + "cloud.google.com/go/bigquery" + "google.golang.org/api/option" +) + +func NewConnector(projectID string, opts ...option.ClientOption) driver.Connector { + return &bigQueryConnector{ + projectID: projectID, + opts: opts, + } +} + +type bigQueryConnector struct { + projectID string + opts []option.ClientOption +} + +func (c *bigQueryConnector) Connect(ctx context.Context) (driver.Conn, error) { + client, err := bigquery.NewClient(ctx, c.projectID, c.opts...) + if err != nil { + return nil, err + } + + return &bigQueryConnection{ + ctx: ctx, + client: client, + }, nil +} + +// Driver returns the underlying Driver of the Connector, +// mainly to maintain compatibility with the Driver method +// on sql.DB. 
+func (c *bigQueryConnector) Driver() driver.Driver { + return &bigQueryDriver{} +} diff --git a/sqlconnect/internal/bigquery/driver/driver.go b/sqlconnect/internal/bigquery/driver/driver.go new file mode 100644 index 0000000..1d8e58e --- /dev/null +++ b/sqlconnect/internal/bigquery/driver/driver.go @@ -0,0 +1,90 @@ +package driver + +import ( + "context" + "database/sql/driver" + "fmt" + "net/url" + "strings" + + "cloud.google.com/go/bigquery" + "google.golang.org/api/option" +) + +type bigQueryDriver struct{} + +type bigQueryConfig struct { + projectID string + scopes []string + endpoint string + disableAuth bool + credentialFile string + credentialsJSON string +} + +func (b bigQueryDriver) Open(uri string) (driver.Conn, error) { + config, err := configFromUri(uri) + if err != nil { + return nil, err + } + + ctx := context.Background() + + opts := []option.ClientOption{option.WithScopes(config.scopes...)} + if config.endpoint != "" { + opts = append(opts, option.WithEndpoint(config.endpoint)) + } + if config.disableAuth { + opts = append(opts, option.WithoutAuthentication()) + } + if config.credentialFile != "" { + opts = append(opts, option.WithCredentialsFile(config.credentialFile)) + } + if config.credentialsJSON != "" { + opts = append(opts, option.WithCredentialsJSON([]byte(config.credentialsJSON))) + } + + client, err := bigquery.NewClient(ctx, config.projectID, opts...) 
+ if err != nil { + return nil, err + } + + return &bigQueryConnection{ + ctx: ctx, + client: client, + }, nil +} + +func configFromUri(uri string) (*bigQueryConfig, error) { + u, err := url.Parse(uri) + if err != nil { + return nil, invalidConnectionStringError(uri) + } + + if u.Scheme != "bigquery" { + return nil, fmt.Errorf("invalid prefix, expected bigquery:// got: %s", uri) + } + + config := &bigQueryConfig{ + projectID: u.Hostname(), + scopes: getScopes(u.Query()), + endpoint: u.Query().Get("endpoint"), + disableAuth: u.Query().Get("disable_auth") == "true", + credentialFile: u.Query().Get("credential_file"), + credentialsJSON: u.Query().Get("credentials_json"), + } + + return config, nil +} + +func getScopes(query url.Values) []string { + q := strings.Trim(query.Get("scopes"), ",") + if q == "" { + return []string{} + } + return strings.Split(q, ",") +} + +func invalidConnectionStringError(uri string) error { + return fmt.Errorf("invalid connection string: %s", uri) +} diff --git a/sqlconnect/internal/bigquery/driver/driver_test.go b/sqlconnect/internal/bigquery/driver/driver_test.go new file mode 100644 index 0000000..66b427b --- /dev/null +++ b/sqlconnect/internal/bigquery/driver/driver_test.go @@ -0,0 +1,228 @@ +package driver_test + +import ( + "context" + "database/sql" + "encoding/json" + "fmt" + "net/url" + "os" + "strings" + "testing" + "time" + + "github.com/stretchr/testify/require" + "google.golang.org/api/option" + + "github.com/rudderlabs/rudder-go-kit/testhelper/rand" + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/bigquery/driver" +) + +func TestBigqueryDriver(t *testing.T) { + ctx, cancel := context.WithCancel(context.Background()) + t.Cleanup(cancel) + + configJSON, ok := os.LookupEnv("BIGQUERY_TEST_ENVIRONMENT_CREDENTIALS") + if !ok { + t.Skip("skipping bigquery driver test due to lack of a test environment") + } + var c config + require.NoError(t, json.Unmarshal([]byte(configJSON), &c)) + + t.Run("OpenDB", func(t *testing.T) { + 
db := sql.OpenDB(driver.NewConnector(c.ProjectID, option.WithCredentialsJSON([]byte(c.CredentialsJSON)))) + t.Cleanup(func() { + require.NoError(t, db.Close(), "it should be able to close the database connection") + }) + }) + + q := url.Values{"credentials_json": []string{c.CredentialsJSON}} + urn := url.URL{Scheme: "bigquery", Host: c.ProjectID, RawQuery: q.Encode()} + db, err := sql.Open("bigquery", urn.String()) + require.NoError(t, err, "it should be able to open the database connection") + t.Cleanup(func() { + require.NoError(t, db.Close(), "it should be able to close the database connection") + }) + + schema := GenerateTestSchema() + + t.Run("Ping", func(t *testing.T) { + require.NoError(t, db.Ping(), "it should be able to ping the database") + require.NoError(t, db.PingContext(ctx), "it should be able to ping the database using a context") + }) + + t.Run("Transaction unsupported", func(t *testing.T) { + t.Run("Begin", func(t *testing.T) { + _, err := db.Begin() + require.Error(t, err, "it should not be able to begin a transaction") + }) + + t.Run("BeginTx", func(t *testing.T) { + _, err := db.BeginTx(ctx, nil) + require.Error(t, err, "it should not be able to begin a transaction") + }) + }) + t.Run("Exec", func(t *testing.T) { + _, err := db.Exec(fmt.Sprintf("CREATE SCHEMA `%s`", schema)) + require.NoError(t, err, "it should be able to create a schema") + }) + + t.Run("ExecContext", func(t *testing.T) { + _, err := db.ExecContext(ctx, fmt.Sprintf("CREATE TABLE `%s`.`test_table` (C1 INT, C2 ARRAY)", schema)) + require.NoError(t, err, "it should be able to create a table") + }) + + t.Run("prepared statement", func(t *testing.T) { + t.Run("QueryRow", func(t *testing.T) { + stmt, err := db.Prepare(fmt.Sprintf("SELECT COUNT(*) FROM `%s`.`test_table`", schema)) + require.NoError(t, err, "it should be able to prepare a statement") + defer func() { + require.NoError(t, stmt.Close(), "it should be able to close the prepared statement") + }() + + var count int + err = 
stmt.QueryRow().Scan(&count) + require.NoError(t, err, "it should be able to execute a prepared statement") + }) + + t.Run("Exec", func(t *testing.T) { + stmt, err := db.Prepare(fmt.Sprintf("INSERT INTO `%s`.`test_table` (C1) VALUES (?)", schema)) + require.NoError(t, err, "it should be able to prepare a statement") + defer func() { + require.NoError(t, stmt.Close(), "it should be able to close the prepared statement") + }() + result, err := stmt.Exec(1) + require.NoError(t, err, "it should be able to execute a prepared statement") + + _, err = result.LastInsertId() + require.Error(t, err, "last insert id not supported") + + rowsAffected, err := result.RowsAffected() + require.NoError(t, err, "it should be able to get rows affected") + require.EqualValues(t, 0, rowsAffected, "rows affected should be 0 (not supported)") + }) + + t.Run("Query", func(t *testing.T) { + stmt, err := db.Prepare(fmt.Sprintf("SELECT C1 FROM `%s`.`test_table` WHERE C1 = ?", schema)) + require.NoError(t, err, "it should be able to prepare a statement") + defer func() { + require.NoError(t, stmt.Close(), "it should be able to close the prepared statement") + }() + rows, err := stmt.Query(1) + require.NoError(t, err, "it should be able to execute a prepared statement") + defer func() { + require.NoError(t, rows.Close(), "it should be able to close the rows") + }() + require.True(t, rows.Next(), "it should be able to get a row") + var c1 int + err = rows.Scan(&c1) + require.NoError(t, err, "it should be able to scan the row") + require.EqualValues(t, 1, c1, "it should be able to get the correct value") + require.False(t, rows.Next(), "it shouldn't have next row") + + require.NoError(t, rows.Err()) + }) + + t.Run("Query with named parameters", func(t *testing.T) { + stmt, err := db.PrepareContext(ctx, fmt.Sprintf("SELECT C1, C2 FROM `%s`.`test_table` WHERE C1 = @c1_value", schema)) + require.NoError(t, err, "it should be able to prepare a statement") + defer func() { + require.NoError(t, 
stmt.Close(), "it should be able to close the prepared statement") + }() + rows, err := stmt.QueryContext(ctx, sql.Named("c1_value", 1)) + require.NoError(t, err, "it should be able to execute a prepared statement") + defer func() { + require.NoError(t, rows.Close(), "it should be able to close the rows") + }() + + cols, err := rows.Columns() + require.NoError(t, err, "it should be able to get the columns") + require.EqualValues(t, []string{"C1", "C2"}, cols, "it should be able to get the correct columns") + + colTypes, err := rows.ColumnTypes() + require.NoError(t, err, "it should be able to get the column types") + require.Len(t, colTypes, 2, "it should be able to get the correct number of column types") + require.EqualValues(t, "INTEGER", colTypes[0].DatabaseTypeName(), "it should be able to get the correct column type") + require.EqualValues(t, "ARRAY", colTypes[1].DatabaseTypeName(), "it should be able to get the correct column type") + + require.True(t, rows.Next(), "it should be able to get a row") + var c1 int + var c2 any + err = rows.Scan(&c1, &c2) + require.NoError(t, err, "it should be able to scan the row") + require.EqualValues(t, 1, c1, "it should be able to get the correct value") + require.Nil(t, c2, "it should be able to get the correct value") + require.False(t, rows.Next(), "it shouldn't have next row") + + require.NoError(t, rows.Err()) + }) + }) + + t.Run("query", func(t *testing.T) { + t.Run("QueryRow", func(t *testing.T) { + var count int + err := db.QueryRow(fmt.Sprintf("SELECT COUNT(*) FROM `%s`.`test_table`", schema)).Scan(&count) + require.NoError(t, err, "it should be able to execute a prepared statement") + require.Equal(t, 1, count, "it should be able to get the correct value") + }) + + t.Run("Exec", func(t *testing.T) { + result, err := db.Exec(fmt.Sprintf("INSERT INTO `%s`.`test_table` (C1) VALUES (?)", schema), 2) + require.NoError(t, err, "it should be able to execute a prepared statement") + rowsAffected, err := 
result.RowsAffected() + require.NoError(t, err, "it should be able to get rows affected") + require.EqualValues(t, 0, rowsAffected, "rows affected should be 0 (not supported)") + }) + + t.Run("Query", func(t *testing.T) { + rows, err := db.Query(fmt.Sprintf("SELECT C1 FROM `%s`.`test_table` WHERE C1 = ?", schema), 2) + require.NoError(t, err, "it should be able to execute a prepared statement") + defer func() { + require.NoError(t, rows.Close(), "it should be able to close the rows") + }() + require.True(t, rows.Next(), "it should be able to get a row") + var c1 int + err = rows.Scan(&c1) + require.NoError(t, err, "it should be able to scan the row") + require.EqualValues(t, 2, c1, "it should be able to get the correct value") + require.False(t, rows.Next(), "it shouldn't have next row") + + require.NoError(t, rows.Err()) + }) + + t.Run("Query with named parameters", func(t *testing.T) { + rows, err := db.QueryContext(ctx, fmt.Sprintf("SELECT C1 FROM `%s`.`test_table` WHERE C1 = @c1_value", schema), sql.Named("c1_value", 2)) + require.NoError(t, err, "it should be able to execute a prepared statement") + defer func() { + require.NoError(t, rows.Close(), "it should be able to close the rows") + }() + + cols, err := rows.Columns() + require.NoError(t, err, "it should be able to get the columns") + require.EqualValues(t, []string{"C1"}, cols, "it should be able to get the correct columns") + + colTypes, err := rows.ColumnTypes() + require.NoError(t, err, "it should be able to get the column types") + require.Len(t, colTypes, 1, "it should be able to get the correct number of column types") + require.EqualValues(t, "INTEGER", colTypes[0].DatabaseTypeName(), "it should be able to get the correct column type") + + require.True(t, rows.Next(), "it should be able to get a row") + var c1 int + err = rows.Scan(&c1) + require.NoError(t, err, "it should be able to scan the row") + require.EqualValues(t, 2, c1, "it should be able to get the correct value") + require.False(t, 
rows.Next(), "it shouldn't have next row") + + require.NoError(t, rows.Err()) + }) + }) +} + +type config struct { + ProjectID string `json:"project"` + CredentialsJSON string `json:"credentials"` +} + +func GenerateTestSchema() string { + return strings.ToLower(fmt.Sprintf("tbqdrv_%s_%d", rand.String(12), time.Now().Unix())) +} diff --git a/sqlconnect/internal/bigquery/driver/init.go b/sqlconnect/internal/bigquery/driver/init.go new file mode 100644 index 0000000..cf42b1e --- /dev/null +++ b/sqlconnect/internal/bigquery/driver/init.go @@ -0,0 +1,9 @@ +package driver + +import ( + "database/sql" +) + +func init() { + sql.Register("bigquery", &bigQueryDriver{}) +} diff --git a/sqlconnect/internal/bigquery/driver/result.go b/sqlconnect/internal/bigquery/driver/result.go new file mode 100644 index 0000000..26e3e81 --- /dev/null +++ b/sqlconnect/internal/bigquery/driver/result.go @@ -0,0 +1,19 @@ +package driver + +import ( + "errors" + + "cloud.google.com/go/bigquery" +) + +type bigQueryResult struct { + rowIterator *bigquery.RowIterator +} + +func (result *bigQueryResult) LastInsertId() (int64, error) { + return 0, errors.New("LastInsertId is not supported") +} + +func (result *bigQueryResult) RowsAffected() (int64, error) { + return int64(result.rowIterator.TotalRows), nil +} diff --git a/sqlconnect/internal/bigquery/driver/rows.go b/sqlconnect/internal/bigquery/driver/rows.go new file mode 100644 index 0000000..a50d7c4 --- /dev/null +++ b/sqlconnect/internal/bigquery/driver/rows.go @@ -0,0 +1,58 @@ +package driver + +import ( + "database/sql/driver" + "io" + + "google.golang.org/api/iterator" +) + +type bigQueryRows struct { + source bigQuerySource + schema bigQuerySchema +} + +func (rows *bigQueryRows) ensureSchema() { + if rows.schema == nil { + rows.schema = rows.source.GetSchema() + } +} + +func (rows *bigQueryRows) Columns() []string { + rows.ensureSchema() + return rows.schema.ColumnNames() +} + +func (rows *bigQueryRows) Close() error { + return nil +} + 
+func (rows *bigQueryRows) Next(dest []driver.Value) error { + rows.ensureSchema() + + values, err := rows.source.Next() + if err == iterator.Done { + return io.EOF + } + + if err != nil { + return err + } + + length := len(values) + for i := range dest { + if i < length { + dest[i], err = rows.schema.ConvertColumnValue(i, values[i]) + if err != nil { + return err + } + } + } + + return nil +} + +func (rows *bigQueryRows) ColumnTypeDatabaseTypeName(index int) string { + rows.ensureSchema() + return rows.schema.ColumnTypeDatabaseTypeName(index) +} diff --git a/sqlconnect/internal/bigquery/driver/source.go b/sqlconnect/internal/bigquery/driver/source.go new file mode 100644 index 0000000..9233b39 --- /dev/null +++ b/sqlconnect/internal/bigquery/driver/source.go @@ -0,0 +1,56 @@ +package driver + +import ( + "cloud.google.com/go/bigquery" +) + +type bigQuerySource interface { + GetSchema() bigQuerySchema + Next() ([]bigquery.Value, error) +} + +type bigQueryRowIteratorSource struct { + iterator *bigquery.RowIterator + prevValues map[string]bigquery.Value + prevError error +} + +func (source *bigQueryRowIteratorSource) GetSchema() bigQuerySchema { + return createBigQuerySchema(source.iterator.Schema) +} + +func (source *bigQueryRowIteratorSource) Next() ([]bigquery.Value, error) { + // Using a map[string]bigquery.Value instead of a []bigquery.Value for properly mapping structs. + // If we were to use a slice, structs would be mapped as an array of values, e.g. [value1, value2, ...] 
+ // instead of {field1: value1, field2: value2, ...} + var values map[string]bigquery.Value + var err error + if source.prevValues != nil || source.prevError != nil { + values = source.prevValues + err = source.prevError + source.prevValues = nil + source.prevError = nil + } else { + err = source.iterator.Next(&values) + } + var res []bigquery.Value + if err != nil { + return res, err + } + res = make([]bigquery.Value, len(source.iterator.Schema)) + for i, s := range source.iterator.Schema { + res[i] = values[s.Name] + } + return res, err +} + +func createSourceFromRowIterator(rowIterator *bigquery.RowIterator) bigQuerySource { + source := &bigQueryRowIteratorSource{ + iterator: rowIterator, + } + // Call RowIterator.Next once so that calls to source.iterator.Schema will return values + if source.iterator != nil { + source.prevError = source.iterator.Next(&source.prevValues) + } + return source +} diff --git a/sqlconnect/internal/bigquery/driver/statement.go b/sqlconnect/internal/bigquery/driver/statement.go new file mode 100644 index 0000000..4055287 --- /dev/null +++ b/sqlconnect/internal/bigquery/driver/statement.go @@ -0,0 +1,133 @@ +package driver + +import ( + "context" + "database/sql/driver" + "regexp" + "strings" + + "cloud.google.com/go/bigquery" + "github.com/samber/lo" + "github.com/sirupsen/logrus" +) + +var namedParamsRegexp = regexp.MustCompile(`@[\w]+`) + +type bigQueryStatement struct { + connection *bigQueryConnection + query string +} + +func (statement bigQueryStatement) Close() error { + return nil +} + +func (statement bigQueryStatement) NumInput() int { + params := strings.Count(statement.query, "?") + if params > 0 { + return params + } + uniqueMatches := lo.Uniq(namedParamsRegexp.FindAllString(statement.query, -1)) + return len(uniqueMatches) +} + +func (bigQueryStatement) CheckNamedValue(*driver.NamedValue) error { + return nil +} + +func (statement *bigQueryStatement) ExecContext(ctx context.Context, args []driver.NamedValue) 
(driver.Result, error) { + query, err := statement.buildQuery(convertParameters(args)) + if err != nil { + return nil, err + } + + rowIterator, err := query.Read(ctx) + if err != nil { + return nil, err + } + + return &bigQueryResult{rowIterator}, nil +} + +func (statement *bigQueryStatement) QueryContext(ctx context.Context, args []driver.NamedValue) (driver.Rows, error) { + query, err := statement.buildQuery(convertParameters(args)) + if err != nil { + return nil, err + } + + rowIterator, err := query.Read(context.Background()) + if err != nil { + return nil, err + } + + return &bigQueryRows{ + source: createSourceFromRowIterator(rowIterator), + }, nil +} + +func (statement bigQueryStatement) Exec(args []driver.Value) (driver.Result, error) { + return nil, driver.ErrSkip +} + +func (statement bigQueryStatement) Query(args []driver.Value) (driver.Rows, error) { + return nil, driver.ErrSkip +} + +func (statement bigQueryStatement) buildQuery(args []driver.Value) (*bigquery.Query, error) { + query, err := statement.connection.query(statement.query) + if err != nil { + return nil, err + } + query.Parameters, err = statement.buildParameters(args) + if err != nil { + return nil, err + } + + return query, err +} + +func (statement bigQueryStatement) buildParameters(args []driver.Value) ([]bigquery.QueryParameter, error) { + if args == nil { + return nil, nil + } + + var parameters []bigquery.QueryParameter + for _, arg := range args { + parameters = buildParameter(arg, parameters) + } + return parameters, nil +} + +func buildParameter(arg driver.Value, parameters []bigquery.QueryParameter) []bigquery.QueryParameter { + namedValue, ok := arg.(driver.NamedValue) + if ok { + return buildParameterFromNamedValue(namedValue, parameters) + } + + logrus.Debugf("-param:%s", arg) + + return append(parameters, bigquery.QueryParameter{ + Value: arg, + }) +} + +func buildParameterFromNamedValue(namedValue driver.NamedValue, parameters []bigquery.QueryParameter) 
[]bigquery.QueryParameter { + if namedValue.Name == "" { + return append(parameters, bigquery.QueryParameter{ + Value: namedValue.Value, + }) + } else { + return append(parameters, bigquery.QueryParameter{ + Name: namedValue.Name, + Value: namedValue.Value, + }) + } +} + +func convertParameters(args []driver.NamedValue) []driver.Value { + var values []driver.Value + for _, arg := range args { + values = append(values, arg) + } + return values +} diff --git a/sqlconnect/internal/bigquery/integration_test.go b/sqlconnect/internal/bigquery/integration_test.go new file mode 100644 index 0000000..eb495bb --- /dev/null +++ b/sqlconnect/internal/bigquery/integration_test.go @@ -0,0 +1,18 @@ +package bigquery_test + +import ( + "os" + "strings" + "testing" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/bigquery" + integrationtest "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/integration_test" +) + +func TestBigqueryDB(t *testing.T) { + configJSON, ok := os.LookupEnv("BIGQUERY_TEST_ENVIRONMENT_CREDENTIALS") + if !ok { + t.Skip("skipping bigquery integration test due to lack of a test environment") + } + integrationtest.TestDatabaseScenarios(t, bigquery.DatabaseType, []byte(configJSON), strings.ToLower, integrationtest.Options{LegacySupport: true}) +} diff --git a/sqlconnect/internal/bigquery/legacy_mappings.go b/sqlconnect/internal/bigquery/legacy_mappings.go new file mode 100644 index 0000000..6084bc5 --- /dev/null +++ b/sqlconnect/internal/bigquery/legacy_mappings.go @@ -0,0 +1,62 @@ +package bigquery + +import ( + "math/big" + "strings" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/base" +) + +func legacyColumnTypeMapper(columnType base.ColumnType) string { + columnType.DatabaseTypeName() + columnTypeMappings := map[string]string{ + "BOOLEAN": "boolean", + "BOOL": "boolean", + "INTEGER": "int", + "INT64": "int", + "INT": "int", + "SMALLINT": "int", + "TINYINT": "int", + "BIGINT": "int", + "NUMERIC": "float", + "BIGNUMERIC": 
"float", + "FLOAT": "float", + "FLOAT64": "float", + "DECIMAL": "float", + "BIGDECIMAL": "float", + "STRING": "string", + "BYTES": "string", + "DATE": "datetime", + "DATETIME": "datetime", + "TIME": "datetime", + "TIMESTAMP": "datetime", + } + databaseTypeName := strings.ToUpper(re.ReplaceAllString(columnType.DatabaseTypeName(), "")) + if mappedType, ok := columnTypeMappings[strings.ToUpper(databaseTypeName)]; ok { + return mappedType + } + if databaseTypeName == "ARRAY" { + return "array" + } + if databaseTypeName == "STRUCT" { + return "RECORD" + } + return databaseTypeName +} + +// legacyJsonRowMapper maps a row's scanned column to a json object's field +func legacyJsonRowMapper(_ string, value any) any { + switch v := (value).(type) { + case *big.Rat: + // Handle big.Rat values + if !v.IsInt() { + floatVal, _ := v.Float64() + return floatVal + } else { + return v.Num().Int64() + } + default: + // Handle other data types as is + return v + } +} diff --git a/sqlconnect/internal/bigquery/mappings.go b/sqlconnect/internal/bigquery/mappings.go new file mode 100644 index 0000000..1749b95 --- /dev/null +++ b/sqlconnect/internal/bigquery/mappings.go @@ -0,0 +1,95 @@ +package bigquery + +import ( + "encoding/json" + "math/big" + "regexp" + "strings" + "time" + + "cloud.google.com/go/bigquery" + "cloud.google.com/go/civil" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/base" +) + +// mapping of database column types to rudder types +var columnTypeMappings = map[string]string{ + "BOOLEAN": "boolean", + "BOOL": "boolean", + + "INT64": "int", // INT64 and aliases + "INT": "int", + "SMALLINT": "int", + "INTEGER": "int", + "BIGINT": "int", + "TINYINT": "int", + "BYTEINT": "int", + + "INTERVAL": "int", + + "NUMERIC": "float", // NUMERIC and aliases + "DECIMAL": "float", + + "BIGNUMERIC": "float", // BIGNUMERIC and aliases + "BIGDECIMAL": "float", + + "FLOAT": "float", + "FLOAT64": "float", + + "STRING": "string", + "BYTES": "string", + "GEOGRAPHY": "string", + 
"TIME": "datetime", + + "DATE": "datetime", + "DATETIME": "datetime", + "TIMESTAMP": "datetime", + + "JSON": "json", + "ARRAY": "json", + "STRUCT": "json", // STRUCT and RECORD are represented as an array of json objects + "RECORD": "json", +} + +var re = regexp.MustCompile(`(\(.+\)|<.+>)`) // remove type parameters [<>] and size constraints [()] + +func columnTypeMapper(columnType base.ColumnType) string { + databaseTypeName := strings.ToUpper(re.ReplaceAllString(columnType.DatabaseTypeName(), "")) + if mappedType, ok := columnTypeMappings[strings.ToUpper(databaseTypeName)]; ok { + return mappedType + } + return databaseTypeName +} + +// jsonRowMapper maps a row's scanned column to a json object's field +func jsonRowMapper(databaseTypeName string, value any) any { + switch v := (value).(type) { + case *big.Rat: + if !v.IsInt() { + floatVal, _ := v.Float64() + return floatVal + } else { + return v.Num().Int64() + } + case civil.Date: + return time.Date(int(v.Year), time.Month(v.Month), int(v.Day), 0, 0, 0, 0, time.UTC) + case civil.Time: + return time.Date(0, 1, 1, int(v.Hour), int(v.Minute), int(v.Second), int(v.Nanosecond), time.UTC) + case civil.DateTime: + return time.Date(int(v.Date.Year), time.Month(v.Date.Month), int(v.Date.Day), int(v.Time.Hour), int(v.Time.Minute), int(v.Time.Second), int(v.Time.Nanosecond), time.UTC) + case *bigquery.IntervalValue: + return v.ToDuration() + case []uint8: + return string(v) + case string: + switch databaseTypeName { + case "JSON": + return json.RawMessage(v) + } + return v + default: + // Handle other data types as is + return v + } +} diff --git a/sqlconnect/internal/bigquery/schemaadmin.go b/sqlconnect/internal/bigquery/schemaadmin.go new file mode 100644 index 0000000..b10b2b8 --- /dev/null +++ b/sqlconnect/internal/bigquery/schemaadmin.go @@ -0,0 +1,57 @@ +package bigquery + +import ( + "context" + "errors" + + "cloud.google.com/go/bigquery" + "google.golang.org/api/googleapi" + "google.golang.org/api/iterator" + + 
"github.com/rudderlabs/sqlconnect-go/sqlconnect" +) + +// SchemaExists uses the bigquery client instead of [INFORMATION_SCHEMA.SCHEMATA] due to absence of a region qualifier +// https://cloud.google.com/bigquery/docs/information-schema-datasets-schemata#scope_and_syntax +func (db *DB) SchemaExists(ctx context.Context, schemaRef sqlconnect.SchemaRef) (bool, error) { + var exists bool + if err := db.WithBigqueryClient(ctx, func(c *bigquery.Client) error { + if _, err := c.Dataset(schemaRef.Name).Metadata(ctx); err != nil { + var e *googleapi.Error + if ok := errors.As(err, &e); ok { + if e.Code == 404 { // not found + return nil + } + } + return err + } + exists = true + return nil + }); err != nil { + return false, err + } + return exists, nil +} + +// ListSchemas uses the bigquery client instead of [INFORMATION_SCHEMA.SCHEMATA] due to absence of a region qualifier +// https://cloud.google.com/bigquery/docs/information-schema-datasets-schemata#scope_and_syntax +func (db *DB) ListSchemas(ctx context.Context) ([]sqlconnect.SchemaRef, error) { + var schemas []sqlconnect.SchemaRef + if err := db.WithBigqueryClient(ctx, func(c *bigquery.Client) error { + datasets := c.Datasets(ctx) + for { + var dataset *bigquery.Dataset + dataset, err := datasets.Next() + if err != nil { + if err == iterator.Done { + return nil + } + return err + } + schemas = append(schemas, sqlconnect.SchemaRef{Name: dataset.DatasetID}) + } + }); err != nil { + return nil, err + } + return schemas, nil +} diff --git a/sqlconnect/internal/bigquery/testdata/column-mapping-test-columns.json b/sqlconnect/internal/bigquery/testdata/column-mapping-test-columns.json new file mode 100644 index 0000000..49f1b0a --- /dev/null +++ b/sqlconnect/internal/bigquery/testdata/column-mapping-test-columns.json @@ -0,0 +1,29 @@ +{ + "_order": "int", + "_array": "json", + "_bignumeric": "float", + "_bignumericnoscale": "float", + "_bigdecimal": "float", + "_bool": "boolean", + "_boolean": "boolean", + "_bytes": "string", 
+ "_date": "datetime", + "_datetime": "datetime", + "_float64": "float", + "_geo": "string", + "_int64": "int", + "_int": "int", + "_smallint": "int", + "_integer": "int", + "_bigint": "int", + "_tinyint": "int", + "_byteint": "int", + "_interval": "int", + "_json": "json", + "_numeric": "float", + "_decimal": "float", + "_string": "string", + "_struct": "json", + "_time": "datetime", + "_timestamp": "datetime" +} \ No newline at end of file diff --git a/sqlconnect/internal/bigquery/testdata/column-mapping-test-rows.json b/sqlconnect/internal/bigquery/testdata/column-mapping-test-rows.json new file mode 100644 index 0000000..21abf64 --- /dev/null +++ b/sqlconnect/internal/bigquery/testdata/column-mapping-test-rows.json @@ -0,0 +1,89 @@ +[ + { + "_order": 1, + "_array": ["ONE"], + "_bignumeric": 1.1, + "_bignumericnoscale": 1, + "_bigdecimal": 1, + "_bool": true, + "_boolean": true, + "_bytes": "abc", + "_date": "2014-09-27T00:00:00Z", + "_datetime": "2014-09-27T12:30:00.45Z", + "_float64": 1.1, + "_geo": "POINT(32 90)", + "_int64": 1, + "_int": 1, + "_smallint": 1, + "_integer": 1, + "_bigint": 1, + "_tinyint": 1, + "_byteint": 1, + "_interval": 31104000000000000, + "_json": {"key": "value"}, + "_numeric": 1, + "_decimal": 1, + "_string": "string", + "_struct": { "a": "string", "b": 1}, + "_time": "0000-01-01T12:30:00.45Z", + "_timestamp": "2014-09-27T20:30:00.45Z" + }, + { + "_order": 2, + "_array": null, + "_bignumeric": 0, + "_bignumericnoscale": 0, + "_bigdecimal": 0, + "_bool": false, + "_boolean": false, + "_bytes": "", + "_date": "2014-09-27T00:00:00Z", + "_datetime": "2014-09-27T12:30:00.45Z", + "_float64": 0, + "_geo": "GEOMETRYCOLLECTION EMPTY", + "_int64": 0, + "_int": 0, + "_smallint": 0, + "_integer": 0, + "_bigint": 0, + "_tinyint": 0, + "_byteint": 0, + "_interval": 31104000000000000, + "_json": {}, + "_numeric": 0, + "_decimal": 0, + "_string": "", + "_struct": { "a": "", "b": 0}, + "_time": "0000-01-01T12:30:00.45Z", + "_timestamp": 
"2014-09-27T20:30:00.45Z" + }, + { + "_order": 3, + "_array": null, + "_bignumeric": null, + "_bignumericnoscale": null, + "_bigdecimal": null, + "_bool": null, + "_boolean": null, + "_bytes": null, + "_date": null, + "_datetime": null, + "_float64": null, + "_geo": null, + "_int64": null, + "_int": null, + "_smallint": null, + "_integer": null, + "_bigint": null, + "_tinyint": null, + "_byteint": null, + "_interval": null, + "_json": null, + "_numeric": null, + "_decimal": null, + "_string": null, + "_struct": null, + "_time": null, + "_timestamp": null + } +] \ No newline at end of file diff --git a/sqlconnect/internal/bigquery/testdata/column-mapping-test-seed.sql b/sqlconnect/internal/bigquery/testdata/column-mapping-test-seed.sql new file mode 100644 index 0000000..44a0479 --- /dev/null +++ b/sqlconnect/internal/bigquery/testdata/column-mapping-test-seed.sql @@ -0,0 +1,36 @@ +CREATE TABLE `{{.schema}}`.`column_mappings_test` ( + _order INT64, + _array ARRAY, + _bignumeric BIGNUMERIC(2,1), + _bignumericnoscale BIGNUMERIC(1,0), + _bigdecimal BIGDECIMAL, + _bool BOOL, + _boolean BOOLEAN, + _bytes BYTES, + _date DATE, + _datetime DATETIME, + _float64 FLOAT64, + _geo GEOGRAPHY, + _int64 INT64, + _int INT, + _smallint SMALLINT, + _integer INTEGER, + _bigint BIGINT, + _tinyint TINYINT, + _byteint BYTEINT, + _interval INTERVAL, + _json JSON, + _numeric NUMERIC, + _decimal NUMERIC, + _string STRING(10), + _struct STRUCT, + _time TIME, + _timestamp TIMESTAMP, +); + +INSERT INTO `{{.schema}}`.`column_mappings_test` + (_order, _array, _bignumeric, _bignumericnoscale, _bigdecimal, _bool, _boolean, _bytes, _date, _datetime, _float64, _geo, _int64, _int, _smallint, _integer, _bigint, _tinyint, _byteint, _interval, _json, _numeric, _decimal, _string, _struct, _time, _timestamp) +VALUES + (1, ['ONE'], 1.1, 1, 1, TRUE, TRUE, B"abc", '2014-09-27', '2014-09-27 12:30:00.45', 1.1, ST_GEOGFROMTEXT('POINT(32 90)'), 1, 1, 1, 1, 1, 1, 1, INTERVAL 1 YEAR, JSON '{"key": "value"}', 1, 1, 
'string', ('string', 1), '12:30:00.45', '2014-09-27 12:30:00.45-08'), + (2, [], 0.0, 0, 0, FALSE, FALSE, B"", '2014-09-27', '2014-09-27 12:30:00.45', 0.0, ST_GEOGFROMTEXT('POINT EMPTY'), 0, 0, 0, 0, 0, 0, 0, INTERVAL 1 YEAR, JSON '{}', 0, 0, '', ('', 0), '12:30:00.45', '2014-09-27 12:30:00.45-08'), + (3, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL); \ No newline at end of file diff --git a/sqlconnect/internal/bigquery/testdata/legacy-column-mapping-test-columns-sql.json b/sqlconnect/internal/bigquery/testdata/legacy-column-mapping-test-columns-sql.json new file mode 100644 index 0000000..06829fe --- /dev/null +++ b/sqlconnect/internal/bigquery/testdata/legacy-column-mapping-test-columns-sql.json @@ -0,0 +1,29 @@ +{ + "_order": "int", + "_array": "array", + "_bignumeric": "float", + "_bignumericnoscale": "float", + "_bigdecimal": "float", + "_bool": "boolean", + "_boolean": "boolean", + "_bytes": "string", + "_date": "datetime", + "_datetime": "datetime", + "_float64": "float", + "_geo": "GEOGRAPHY", + "_int64": "int", + "_int": "int", + "_smallint": "int", + "_integer": "int", + "_bigint": "int", + "_tinyint": "int", + "_byteint": "int", + "_interval": "INTERVAL", + "_json": "JSON", + "_numeric": "float", + "_decimal": "float", + "_string": "string", + "_struct": "RECORD", + "_time": "datetime", + "_timestamp": "datetime" +} \ No newline at end of file diff --git a/sqlconnect/internal/bigquery/testdata/legacy-column-mapping-test-columns-table.json b/sqlconnect/internal/bigquery/testdata/legacy-column-mapping-test-columns-table.json new file mode 100644 index 0000000..06829fe --- /dev/null +++ b/sqlconnect/internal/bigquery/testdata/legacy-column-mapping-test-columns-table.json @@ -0,0 +1,29 @@ +{ + "_order": "int", + "_array": "array", + "_bignumeric": "float", + "_bignumericnoscale": "float", + "_bigdecimal": "float", + "_bool": "boolean", + "_boolean": 
"boolean", + "_bytes": "string", + "_date": "datetime", + "_datetime": "datetime", + "_float64": "float", + "_geo": "GEOGRAPHY", + "_int64": "int", + "_int": "int", + "_smallint": "int", + "_integer": "int", + "_bigint": "int", + "_tinyint": "int", + "_byteint": "int", + "_interval": "INTERVAL", + "_json": "JSON", + "_numeric": "float", + "_decimal": "float", + "_string": "string", + "_struct": "RECORD", + "_time": "datetime", + "_timestamp": "datetime" +} \ No newline at end of file diff --git a/sqlconnect/internal/bigquery/testdata/legacy-column-mapping-test-rows.json b/sqlconnect/internal/bigquery/testdata/legacy-column-mapping-test-rows.json new file mode 100644 index 0000000..34bb158 --- /dev/null +++ b/sqlconnect/internal/bigquery/testdata/legacy-column-mapping-test-rows.json @@ -0,0 +1,105 @@ +[ + { + "_order": 1, + "_array": ["ONE"], + "_bignumeric": 1.1, + "_bignumericnoscale": 1, + "_bigdecimal": 1, + "_bool": true, + "_boolean": true, + "_bytes": "YWJj", + "_date": "2014-09-27", + "_datetime": "2014-09-27T12:30:00.450000000", + "_float64": 1.1, + "_geo": "POINT(32 90)", + "_int64": 1, + "_int": 1, + "_smallint": 1, + "_integer": 1, + "_bigint": 1, + "_tinyint": 1, + "_byteint": 1, + "_interval": { + "Days": 0, + "Hours": 0, + "Months": 0, + "Minutes": 0, + "Seconds": 0, + "SubSecondNanos": 0, + "Years": 1 + }, + "_json": "{\"key\":\"value\"}", + "_numeric": 1, + "_decimal": 1, + "_string": "string", + "_struct": { "a": "string", "b": 1}, + "_time": "12:30:00.450000000", + "_timestamp": "2014-09-27T20:30:00.45Z" + }, + { + "_order": 2, + "_array": null, + "_bignumeric": 0, + "_bignumericnoscale": 0, + "_bigdecimal": 0, + "_bool": false, + "_boolean": false, + "_bytes": "", + "_date": "2014-09-27", + "_datetime": "2014-09-27T12:30:00.450000000", + "_float64": 0, + "_geo": "GEOMETRYCOLLECTION EMPTY", + "_int64": 0, + "_int": 0, + "_smallint": 0, + "_integer": 0, + "_bigint": 0, + "_tinyint": 0, + "_byteint": 0, + "_interval": { + "Days": 0, + "Hours": 0, + 
// Config holds the settings required to connect to a Databricks SQL warehouse.
type Config struct {
	Host    string `json:"host"`
	Port    int    `json:"port"`
	Path    string `json:"path"`
	Token   string `json:"token"`
	Catalog string `json:"catalog"`

	RetryAttempts    int           `json:"retryAttempts"`
	MinRetryWaitTime time.Duration `json:"minRetryWaitTime"`
	MaxRetryWaitTime time.Duration `json:"maxRetryWaitTime"`
	MaxConnIdleTime  time.Duration `json:"maxConnIdleTime"`

	// RudderSchema is used to override the default rudder schema name during tests
	RudderSchema      string `json:"rudderSchema"`
	UseLegacyMappings bool   `json:"useLegacyMappings"`
}

// Parse populates the configuration from the given JSON document.
// A missing or empty catalog falls back to the databricks default.
func (c *Config) Parse(configJson json.RawMessage) error {
	if err := json.Unmarshal(configJson, c); err != nil {
		return err
	}
	// if catalog is empty from the UI, use default "hive_metastore"
	if c.Catalog == "" {
		c.Catalog = "hive_metastore"
	}
	return nil
}
a/sqlconnect/internal/databricks/db.go b/sqlconnect/internal/databricks/db.go new file mode 100644 index 0000000..9656d81 --- /dev/null +++ b/sqlconnect/internal/databricks/db.go @@ -0,0 +1,107 @@ +package databricks + +import ( + "database/sql" + "encoding/json" + "fmt" + + databricks "github.com/databricks/databricks-sql-go" + "github.com/samber/lo" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect" + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/base" +) + +const ( + DatabaseType = "databricks" + defaultRudderSchema = "_rudderstack" + userAgentEntry = "Rudderstack" +) + +// NewDB creates a new postgres-specific client +func NewDB(configJson json.RawMessage) (*DB, error) { + var config Config + err := config.Parse(configJson) + if err != nil { + return nil, err + } + + connector, err := databricks.NewConnector( + databricks.WithAccessToken(config.Token), + databricks.WithServerHostname(config.Host), + databricks.WithPort(config.Port), + databricks.WithHTTPPath(config.Path), + databricks.WithInitialNamespace(config.Catalog, ""), + databricks.WithRetries( + config.RetryAttempts, + config.MinRetryWaitTime, + config.MaxRetryWaitTime, + ), + databricks.WithUserAgentEntry(userAgentEntry), + ) + if err != nil { + return nil, err + } + + db := sql.OpenDB(connector) + db.SetConnMaxIdleTime(config.MaxConnIdleTime) + + return &DB{ + DB: base.NewDB( + db, + lo.Ternary(config.RudderSchema != "", config.RudderSchema, defaultRudderSchema), + base.WithDialect(dialect{}), + base.WithColumnTypeMapper(getColumnTypeMapper(config)), + base.WithJsonRowMapper(getJonRowMapper(config)), + base.WithSQLCommandsOverride(func(cmds base.SQLCommands) base.SQLCommands { + cmds.ListSchemas = func() (string, string) { return "SHOW SCHEMAS", "schema_name" } + cmds.SchemaExists = func(schema string) string { return fmt.Sprintf(`SHOW SCHEMAS LIKE '%s'`, schema) } + + cmds.CreateTestTable = func(table string) string { + return fmt.Sprintf("CREATE TABLE IF NOT EXISTS %[1]s (c1 INT, c2 
STRING)", table) + } + cmds.ListTables = func(schema string) []lo.Tuple2[string, string] { + return []lo.Tuple2[string, string]{ + {A: fmt.Sprintf("SHOW TABLES IN %s", schema), B: "tableName"}, + } + } + cmds.ListTablesWithPrefix = func(schema, prefix string) []lo.Tuple2[string, string] { + return []lo.Tuple2[string, string]{ + {A: fmt.Sprintf("SHOW TABLES IN %[1]s LIKE '%[2]s'", schema, prefix+"*"), B: "tableName"}, + } + } + cmds.TableExists = func(schema, table string) string { + return fmt.Sprintf("SHOW TABLES IN %[1]s LIKE '%[2]s'", schema, table) + } + cmds.ListColumns = func(schema, table string) (string, string, string) { + return fmt.Sprintf("DESCRIBE TABLE `%[1]s`.`%[2]s`", schema, table), "col_name", "data_type" + } + return cmds + }), + ), + }, nil +} + +func init() { + sqlconnect.RegisterDBFactory(DatabaseType, func(credentialsJSON json.RawMessage) (sqlconnect.DB, error) { + return NewDB(credentialsJSON) + }) +} + +type DB struct { + *base.DB +} + +func getColumnTypeMapper(config Config) func(base.ColumnType) string { + if config.UseLegacyMappings { + return legacyColumnTypeMapper + } + return columnTypeMapper +} + +func getJonRowMapper(config Config) func(databaseTypeName string, value any) any { + if config.UseLegacyMappings { + return legacyJsonRowMapper + } + return jsonRowMapper +} diff --git a/sqlconnect/internal/databricks/dialect.go b/sqlconnect/internal/databricks/dialect.go new file mode 100644 index 0000000..2003de0 --- /dev/null +++ b/sqlconnect/internal/databricks/dialect.go @@ -0,0 +1,27 @@ +package databricks + +import ( + "strings" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect" +) + +type dialect struct{} + +// QuoteTable quotes a table name +func (d dialect) QuoteTable(table sqlconnect.RelationRef) string { + if table.Schema != "" { + return d.QuoteIdentifier(table.Schema) + "." + d.QuoteIdentifier(table.Name) + } + return d.QuoteIdentifier(table.Name) +} + +// QuoteIdentifier quotes an identifier, e.g. 
a column name +func (d dialect) QuoteIdentifier(name string) string { + return "`" + name + "`" +} + +// FormatTableName formats a table name, typically by lower or upper casing it, depending on the database +func (d dialect) FormatTableName(name string) string { + return strings.ToLower(name) +} diff --git a/sqlconnect/internal/databricks/dialect_test.go b/sqlconnect/internal/databricks/dialect_test.go new file mode 100644 index 0000000..33f9866 --- /dev/null +++ b/sqlconnect/internal/databricks/dialect_test.go @@ -0,0 +1,30 @@ +package databricks + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect" +) + +func TestDialect(t *testing.T) { + var d dialect + t.Run("format table", func(t *testing.T) { + formatted := d.FormatTableName("TaBle") + require.Equal(t, "table", formatted, "table name should be lowercased") + }) + + t.Run("quote identifier", func(t *testing.T) { + quoted := d.QuoteIdentifier("column") + require.Equal(t, "`column`", quoted, "column name should be quoted with backticks") + }) + + t.Run("quote table", func(t *testing.T) { + quoted := d.QuoteTable(sqlconnect.NewRelationRef("table")) + require.Equal(t, "`table`", quoted, "table name should be quoted with backticks") + + quoted = d.QuoteTable(sqlconnect.NewRelationRef("table", sqlconnect.WithSchema("schema"))) + require.Equal(t, "`schema`.`table`", quoted, "schema and table name should be quoted with backticks") + }) +} diff --git a/sqlconnect/internal/databricks/integration_test.go b/sqlconnect/internal/databricks/integration_test.go new file mode 100644 index 0000000..a4a43e1 --- /dev/null +++ b/sqlconnect/internal/databricks/integration_test.go @@ -0,0 +1,30 @@ +package databricks_test + +import ( + "os" + "strings" + "testing" + "time" + + "github.com/stretchr/testify/require" + "github.com/tidwall/sjson" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/databricks" + integrationtest 
"github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/integration_test" +) + +func TestDatabricksDB(t *testing.T) { + configJSON, ok := os.LookupEnv("DATABRICKS_TEST_ENVIRONMENT_CREDENTIALS") + if !ok { + t.Skip("skipping databricks integration test due to lack of a test environment") + } + + configJSON, err := sjson.Set(configJSON, "retryAttempts", 4) + require.NoError(t, err, "failed to set retryAttempts") + configJSON, err = sjson.Set(configJSON, "minRetryWaitTime", time.Second) + require.NoError(t, err, "failed to set minRetryWaitTime") + configJSON, err = sjson.Set(configJSON, "maxRetryWaitTime", 30*time.Second) + require.NoError(t, err, "failed to set maxRetryWaitTime") + + integrationtest.TestDatabaseScenarios(t, databricks.DatabaseType, []byte(configJSON), strings.ToLower, integrationtest.Options{LegacySupport: true}) +} diff --git a/sqlconnect/internal/databricks/legacy_mappings.go b/sqlconnect/internal/databricks/legacy_mappings.go new file mode 100644 index 0000000..c9d9019 --- /dev/null +++ b/sqlconnect/internal/databricks/legacy_mappings.go @@ -0,0 +1,47 @@ +package databricks + +import ( + "strings" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/base" +) + +func legacyColumnTypeMapper(columnType base.ColumnType) string { + columnType.DatabaseTypeName() + columnTypeMappings := map[string]string{ + "DECIMAL": "int", + "NUMERIC": "int", + "DEC": "int", + "INT": "int", + "BIGINT": "int", + "SMALLINT": "int", + "TINYINT": "int", + "FLOAT": "float", + "DOUBLE": "float", + "BOOLEAN": "boolean", + "STRING": "string", + "BINARY": "string", + "DATE": "datetime", + "INTERVAL": "datetime", + "VOID": "string", + "TIMESTAMP": "datetime", + "TIMESTAMP_NTZ": "datetime", + "ARRAY": "json", + "MAP": "json", + "STRUCT": "json", + } + databaseTypeName := strings.ToUpper(re.ReplaceAllString(columnType.DatabaseTypeName(), "")) + if mappedType, ok := columnTypeMappings[strings.ToUpper(databaseTypeName)]; ok { + return mappedType + } + return 
databaseTypeName +} + +// legacyJsonRowMapper maps a row's scanned column to a json object's field +func legacyJsonRowMapper(_ string, value any) any { + switch v := value.(type) { + case []uint8: + return string(v) + } + return value +} diff --git a/sqlconnect/internal/databricks/mappings.go b/sqlconnect/internal/databricks/mappings.go new file mode 100644 index 0000000..782721c --- /dev/null +++ b/sqlconnect/internal/databricks/mappings.go @@ -0,0 +1,82 @@ +package databricks + +import ( + "encoding/json" + "regexp" + "strconv" + "strings" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/base" +) + +// mapping of database column types to rudder types +var columnTypeMappings = map[string]string{ + "DECIMAL": "float", // DECIMAL and aliases + "NUMERIC": "float", + "DEC": "float", + + "INT": "int", // INT and aliases + "INTEGER": "int", + + "BIGINT": "int", // BIGINT and aliases + "LONG": "int", + + "SMALLINT": "int", // SMALLINT and aliases + "SHORT": "int", + + "TINYINT": "int", // TINYINT and aliases + "BYTE": "int", + + "FLOAT": "float", // FLOAT and aliases + "REAL": "float", + + "DOUBLE": "float", + "BOOLEAN": "boolean", + "STRING": "string", + "BINARY": "string", + "DATE": "datetime", + "INTERVAL": "datetime", + "VOID": "string", + "TIMESTAMP": "datetime", + "TIMESTAMP_NTZ": "datetime", + + "ARRAY": "json", + "MAP": "json", + "STRUCT": "json", +} + +var re = regexp.MustCompile(`(\(.+\)|<.+>)`) // remove type parameters [<>] and size constraints [()] + +func columnTypeMapper(columnType base.ColumnType) string { + databaseTypeName := strings.ToUpper(re.ReplaceAllString(columnType.DatabaseTypeName(), "")) + if mappedType, ok := columnTypeMappings[strings.ToUpper(databaseTypeName)]; ok { + return mappedType + } + return databaseTypeName +} + +// jsonRowMapper maps a row's scanned column to a json object's field +func jsonRowMapper(databaseTypeName string, value any) any { + switch v := value.(type) { + case []uint8: + return string(v) + case string: + 
switch databaseTypeName { + case "DECIMAL": + // convert to float + f, err := strconv.ParseFloat(v, 64) + if err != nil { + return v + } + return f + case "ARRAY", "STRUCT", "MAP": // convert string to json + var j any + err := json.Unmarshal([]byte(v), &j) + if err != nil { + return v + } + return j + } + } + return value +} diff --git a/sqlconnect/internal/databricks/tableadmin.go b/sqlconnect/internal/databricks/tableadmin.go new file mode 100644 index 0000000..6b130e7 --- /dev/null +++ b/sqlconnect/internal/databricks/tableadmin.go @@ -0,0 +1,20 @@ +package databricks + +import ( + "context" + "strings" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect" +) + +// RenameTable in databricks falls back to MoveTable if rename is not supported +func (db *DB) RenameTable(ctx context.Context, oldRef, newRef sqlconnect.RelationRef) error { + if err := db.DB.RenameTable(ctx, oldRef, newRef); err != nil { + // move table if rename is not supported + if strings.Contains(err.Error(), "DELTA_ALTER_TABLE_RENAME_NOT_ALLOWED") { + return db.MoveTable(ctx, oldRef, newRef) + } + return err + } + return nil +} diff --git a/sqlconnect/internal/databricks/testdata/column-mapping-test-columns.json b/sqlconnect/internal/databricks/testdata/column-mapping-test-columns.json new file mode 100644 index 0000000..47ce096 --- /dev/null +++ b/sqlconnect/internal/databricks/testdata/column-mapping-test-columns.json @@ -0,0 +1,26 @@ +{ + "_order": "int", + "_decimal": "float", + "_numeric": "float", + "_dec": "float", + "_int": "int", + "_integer": "int", + "_bigint": "int", + "_long": "int", + "_smallint": "int", + "_short": "int", + "_tinyint": "int", + "_byte": "int", + "_float": "float", + "_real": "float", + "_double": "float", + "_boolean": "boolean", + "_string": "string", + "_binary": "string", + "_date": "datetime", + "_timestamp": "datetime", + "_timestampntz": "datetime", + "_array": "json", + "_map": "json", + "_struct": "json" +} diff --git 
a/sqlconnect/internal/databricks/testdata/column-mapping-test-rows.json b/sqlconnect/internal/databricks/testdata/column-mapping-test-rows.json new file mode 100644 index 0000000..a70aee3 --- /dev/null +++ b/sqlconnect/internal/databricks/testdata/column-mapping-test-rows.json @@ -0,0 +1,83 @@ +[ + { + "_order": 1, + "_decimal": 1.1, + "_numeric": 1.1, + "_dec": 1.1, + "_int": 1, + "_integer": 1, + "_bigint": 1, + "_long": 1, + "_smallint": 1, + "_short": 1, + "_tinyint": 1, + "_byte": 1, + "_float": 1.1, + "_real": 1.1, + "_double": 1.1, + "_boolean": true, + "_string": "s", + "_binary": "\u0001", + "_date": "2020-12-31T00:00:00Z", + "_timestamp": "2021-07-01T05:43:28Z", + "_timestampntz": "2021-07-01T08:43:28.123456Z", + "_array": [1,2,3,null], + "_map": {"key":"value", "key1":null}, + "_struct": {"col1": "val1", "col2": 1} + }, + { + "_order": 2, + "_decimal": 0, + "_numeric": 0, + "_dec": 0, + "_int": 0, + "_integer": 0, + "_bigint": 0, + "_long": 0, + "_smallint": 0, + "_short": 0, + "_tinyint": 0, + "_byte": 0, + "_float": 0, + "_real": 0, + "_double": 0, + "_boolean": false, + "_string": "", + "_binary": "", + "_date": "2020-12-31T00:00:00Z", + "_timestamp": "2021-07-01T05:43:28Z", + "_timestampntz": "2021-07-01T08:43:28.123456Z", + "_array": [], + "_map": {"": ""}, + "_struct": {"col1": "val1", "col2": null} + }, + { + "_order": 3, + "_decimal": null, + "_numeric": null, + "_dec": null, + "_int": null, + "_integer": null, + "_bigint": null, + "_long": null, + "_smallint": null, + "_short": null, + "_tinyint": null, + "_byte": null, + "_float": null, + "_real": null, + "_double": null, + "_boolean": null, + "_string": null, + "_binary": null, + "_date": null, + "_timestamp": null, + "_timestampntz": null, + "_array": null, + "_map": null, + "_struct": { + "col1": null, + "col2": null + } + } +] \ No newline at end of file diff --git a/sqlconnect/internal/databricks/testdata/column-mapping-test-seed.sql 
b/sqlconnect/internal/databricks/testdata/column-mapping-test-seed.sql new file mode 100644 index 0000000..40a6bd0 --- /dev/null +++ b/sqlconnect/internal/databricks/testdata/column-mapping-test-seed.sql @@ -0,0 +1,33 @@ +CREATE TABLE `{{.schema}}`.`column_mappings_test` ( + _order INT, + _decimal DECIMAL(2,1), + _numeric NUMERIC(2,1), + _dec DEC(2,1), + _int INT, + _integer INTEGER, + _bigint BIGINT, + _long LONG, + _smallint SMALLINT, + _short SHORT, + _tinyint TINYINT, + _byte BYTE, + _float FLOAT, + _real REAL, + _double DOUBLE, + _boolean BOOLEAN, + _string STRING, + _binary BINARY, + _date DATE, + _timestamp TIMESTAMP, + _timestampntz TIMESTAMP_NTZ, + _array ARRAY, + _map MAP, + _struct STRUCT +); + +INSERT INTO `{{.schema}}`.`column_mappings_test` + (_order, _decimal, _numeric, _dec, _int, _integer, _bigint, _long, _smallint, _short, _tinyint, _byte, _float, _real, _double, _boolean, _string, _binary, _date, _timestamp, _timestampntz, _array, _map, _struct) +VALUES + (1, 1.1, 1.1, 1.1, 1, 1, 1, 1, 1, 1, 1, 1, 1.1, 1.1, 1.1, true, 's', X'1', CAST('2020-12-31' AS DATE), '2021-7-1T8:43:28UTC+3', '2021-7-1T8:43:28.123456', ARRAY(1,2,3,NULL), map('key', 'value', 'key1', NULL), struct('val1', 1) ), + (2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, false, '', X'', CAST('2020-12-31' AS DATE), '2021-7-1T8:43:28UTC+3', '2021-7-1T8:43:28.123456', ARRAY(), map('',''), struct('val1', NULL) ), + (3, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ); \ No newline at end of file diff --git a/sqlconnect/internal/databricks/testdata/legacy-column-mapping-test-columns-sql.json b/sqlconnect/internal/databricks/testdata/legacy-column-mapping-test-columns-sql.json new file mode 100644 index 0000000..274fc49 --- /dev/null +++ b/sqlconnect/internal/databricks/testdata/legacy-column-mapping-test-columns-sql.json @@ -0,0 +1,26 @@ +{ + "_order": "int", + "_decimal": "int", + "_numeric": "int", + 
"_dec": "int", + "_int": "int", + "_integer": "int", + "_bigint": "int", + "_long": "int", + "_smallint": "int", + "_short": "int", + "_tinyint": "int", + "_byte": "int", + "_float": "float", + "_real": "float", + "_double": "float", + "_boolean": "boolean", + "_string": "string", + "_binary": "string", + "_date": "datetime", + "_timestamp": "datetime", + "_timestampntz": "datetime", + "_array": "json", + "_map": "json", + "_struct": "json" +} diff --git a/sqlconnect/internal/databricks/testdata/legacy-column-mapping-test-columns-table.json b/sqlconnect/internal/databricks/testdata/legacy-column-mapping-test-columns-table.json new file mode 100644 index 0000000..274fc49 --- /dev/null +++ b/sqlconnect/internal/databricks/testdata/legacy-column-mapping-test-columns-table.json @@ -0,0 +1,26 @@ +{ + "_order": "int", + "_decimal": "int", + "_numeric": "int", + "_dec": "int", + "_int": "int", + "_integer": "int", + "_bigint": "int", + "_long": "int", + "_smallint": "int", + "_short": "int", + "_tinyint": "int", + "_byte": "int", + "_float": "float", + "_real": "float", + "_double": "float", + "_boolean": "boolean", + "_string": "string", + "_binary": "string", + "_date": "datetime", + "_timestamp": "datetime", + "_timestampntz": "datetime", + "_array": "json", + "_map": "json", + "_struct": "json" +} diff --git a/sqlconnect/internal/databricks/testdata/legacy-column-mapping-test-rows.json b/sqlconnect/internal/databricks/testdata/legacy-column-mapping-test-rows.json new file mode 100644 index 0000000..d1a4789 --- /dev/null +++ b/sqlconnect/internal/databricks/testdata/legacy-column-mapping-test-rows.json @@ -0,0 +1,80 @@ +[ + { + "_order": 1, + "_decimal": "1.1", + "_numeric": "1.1", + "_dec": "1.1", + "_int": 1, + "_integer": 1, + "_bigint": 1, + "_long": 1, + "_smallint": 1, + "_short": 1, + "_tinyint": 1, + "_byte": 1, + "_float": 1.1, + "_real": 1.1, + "_double": 1.1, + "_boolean": true, + "_string": "s", + "_binary": "\u0001", + "_date": "2020-12-31T00:00:00Z", + 
"_timestamp": "2021-07-01T05:43:28Z", + "_timestampntz": "2021-07-01T08:43:28.123456Z", + "_array": "[1,2,3,null]", + "_map": "{\"key\":\"value\",\"key1\":null}", + "_struct": "{\"col1\":\"val1\",\"col2\":1}" + }, + { + "_order": 2, + "_decimal": "0.0", + "_numeric": "0.0", + "_dec": "0.0", + "_int": 0, + "_integer": 0, + "_bigint": 0, + "_long": 0, + "_smallint": 0, + "_short": 0, + "_tinyint": 0, + "_byte": 0, + "_float": 0, + "_real": 0, + "_double": 0, + "_boolean": false, + "_string": "", + "_binary": "", + "_date": "2020-12-31T00:00:00Z", + "_timestamp": "2021-07-01T05:43:28Z", + "_timestampntz": "2021-07-01T08:43:28.123456Z", + "_array": "[]", + "_map": "{\"\":\"\"}", + "_struct": "{\"col1\":\"val1\",\"col2\":null}" + }, + { + "_order": 3, + "_decimal": null, + "_numeric": null, + "_dec": null, + "_int": null, + "_integer": null, + "_bigint": null, + "_long": null, + "_smallint": null, + "_short": null, + "_tinyint": null, + "_byte": null, + "_float": null, + "_real": null, + "_double": null, + "_boolean": null, + "_string": null, + "_binary": null, + "_date": null, + "_timestamp": null, + "_timestampntz": null, + "_array": null, + "_map": null, + "_struct": "{\"col1\":null,\"col2\":null}" + } +] \ No newline at end of file diff --git a/sqlconnect/internal/integration_test/db_integration_test_scenario.go b/sqlconnect/internal/integration_test/db_integration_test_scenario.go new file mode 100644 index 0000000..fbdc632 --- /dev/null +++ b/sqlconnect/internal/integration_test/db_integration_test_scenario.go @@ -0,0 +1,609 @@ +package integrationtest + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "os" + "regexp" + "strings" + "sync" + "testing" + "text/template" + "time" + + "github.com/samber/lo" + "github.com/stretchr/testify/require" + "github.com/tidwall/sjson" + + "github.com/rudderlabs/rudder-go-kit/testhelper/rand" + "github.com/rudderlabs/sqlconnect-go/sqlconnect" +) + +type Options struct { + // LegacySupport enables the use of legacy 
column mappings + LegacySupport bool +} + +func TestDatabaseScenarios(t *testing.T, warehouse string, configJSON json.RawMessage, formatfn func(string) string, opts Options) { + schema := sqlconnect.SchemaRef{Name: GenerateTestSchema(formatfn)} + configJSON, err := sjson.SetBytes(configJSON, "rudderSchema", schema.Name) + require.NoError(t, err, "it should be able to set the rudder schema") + db, err := sqlconnect.NewDB(warehouse, configJSON) + require.NoError(t, err, "it should be able to create a new DB") + defer func() { _ = db.Close() }() + + ctx := context.Background() + cancelledCtx, cancel := context.WithCancel(context.Background()) + cancel() + + t.Run("using invalid configuration", func(t *testing.T) { + _, err := sqlconnect.NewDB(warehouse, []byte("invalid")) + require.Error(t, err, "it should return error for invalid configuration") + }) + + t.Run("ping", func(t *testing.T) { + t.Run("with context cancelled", func(t *testing.T) { + err := db.PingContext(cancelledCtx) + require.Error(t, err, "it should not be able to ping the database with a cancelled context") + }) + + t.Run("normal operation", func(t *testing.T) { + err := db.Ping() + require.NoError(t, err, "it should be able to ping the database") + + err = db.PingContext(ctx) + require.NoError(t, err, "it should be able to ping the database") + }) + }) + + t.Run("get rudder schema", func(t *testing.T) { + rudderSchema := db.GetRudderSchema() + require.Equal(t, schema.Name, rudderSchema, "it should be able to get the rudder schema") + }) + + t.Run("schema admin", func(t *testing.T) { + t.Run("schema doesn't exist", func(t *testing.T) { + exists, err := db.SchemaExists(ctx, schema) + require.NoError(t, err, "it should be able to check if a schema exists") + require.False(t, exists, "it should return false for a schema that doesn't exist") + }) + + t.Run("create", func(t *testing.T) { + t.Run("with context cancelled", func(t *testing.T) { + err := db.CreateSchema(cancelledCtx, schema) + require.Error(t, 
err, "it should not be able to create a schema with a cancelled context") + }) + + t.Run("normal operation", func(t *testing.T) { + err := db.CreateSchema(ctx, schema) + require.NoError(t, err, "it should be able to create a schema") + }) + + t.Run("idempotence", func(t *testing.T) { + err := db.CreateSchema(ctx, schema) + require.NoError(t, err, "it shouldn't fail if the schema already exists") + }) + }) + t.Run("exists", func(t *testing.T) { + exists, err := db.SchemaExists(ctx, schema) + require.NoError(t, err, "it should be able to check if a schema exists") + require.True(t, exists, "it should return true for a schema that exists") + + t.Run("with context cancelled", func(t *testing.T) { + _, err := db.SchemaExists(cancelledCtx, schema) + require.Error(t, err, "it should not be able to check if a schema exists with a cancelled context") + }) + }) + + t.Run("list", func(t *testing.T) { + schemas, err := db.ListSchemas(ctx) + require.NoError(t, err, "it should be able to list schemas") + require.Contains(t, schemas, schema, "it should contain the created schema") + t.Run("with context cancelled", func(t *testing.T) { + _, err := db.ListSchemas(cancelledCtx) + require.Error(t, err, "it should not be able to list schemas with a cancelled context") + }) + }) + + t.Run("drop", func(t *testing.T) { + t.Run("with context cancelled", func(t *testing.T) { + err := db.DropSchema(cancelledCtx, schema) + require.Error(t, err, "it should not be able to drop a schema with a cancelled context") + }) + + t.Run("normal operation", func(t *testing.T) { + otherSchema := sqlconnect.SchemaRef{Name: GenerateTestSchema(formatfn)} + err := db.CreateSchema(ctx, otherSchema) + require.NoError(t, err, "it should be able to create a schema") + err = db.DropSchema(ctx, otherSchema) + require.NoError(t, err, "it should be able to drop a schema") + }) + + t.Run("invalid schema naem", func(t *testing.T) { + err := db.DropSchema(ctx, sqlconnect.SchemaRef{Name: "nonexistent"}) + 
require.Error(t, err, "it shouldn't be able to drop a non-existent schema") + }) + }) + }) + + t.Run("table admin", func(t *testing.T) { + table := sqlconnect.NewRelationRef(formatfn("test_table"), sqlconnect.WithSchema(schema.Name)) + + t.Run("table doesn't exist", func(t *testing.T) { + t.Run("with context cancelled", func(t *testing.T) { + _, err := db.TableExists(cancelledCtx, table) + require.Error(t, err, "it should not be able to check if a table exists with a cancelled context") + }) + + exists, err := db.TableExists(ctx, table) + require.NoError(t, err, "it should be able to check if a table exists") + require.False(t, exists, "it should return false for a table that doesn't exist") + }) + + t.Run("create test table", func(t *testing.T) { + t.Run("with context cancelled", func(t *testing.T) { + err := db.CreateTestTable(cancelledCtx, table) + require.Error(t, err, "it should not be able to create a test table with a cancelled context") + }) + + err := db.CreateTestTable(ctx, table) + require.NoError(t, err, "it should be able to create a test table") + exists, err := db.TableExists(ctx, table) + require.NoError(t, err, "it should be able to check if a table exists") + require.True(t, exists, "it should return true for a table that was just created") + }) + + t.Run("list tables", func(t *testing.T) { + t.Run("with context cancelled", func(t *testing.T) { + _, err := db.ListTables(cancelledCtx, schema) + require.Error(t, err, "it should not be able to list tables with a cancelled context") + }) + + tables, err := db.ListTables(ctx, schema) + require.NoError(t, err, "it should be able to list tables") + require.Contains(t, tables, table, "it should contain the created table") + }) + + t.Run("list tables with prefix", func(t *testing.T) { + t.Run("with context cancelled", func(t *testing.T) { + _, err := db.ListTablesWithPrefix(cancelledCtx, schema, formatfn("test")) + require.Error(t, err, "it should not be able to list tables with a prefix with a cancelled 
context") + }) + + tables, err := db.ListTablesWithPrefix(ctx, schema, formatfn("test")) + require.NoError(t, err, "it should be able to list tables with a prefix") + require.Contains(t, tables, table, "it should contain the created table") + }) + + t.Run("list columns", func(t *testing.T) { + t.Run("with context cancelled", func(t *testing.T) { + _, err := db.ListColumns(cancelledCtx, table) + require.Error(t, err, "it should not be able to list columns with a cancelled context") + }) + + columns, err := db.ListColumns(ctx, table) + columns = lo.Map(columns, func(col sqlconnect.ColumnRef, _ int) sqlconnect.ColumnRef { + require.NotEmptyf(t, col.RawType, "it should return the raw type for column %q", col.Name) + col.RawType = "" + return col + }) + require.NoError(t, err, "it should be able to list columns") + require.Len(t, columns, 2, "it should return the correct number of columns") + require.ElementsMatch(t, columns, []sqlconnect.ColumnRef{ + {Name: formatfn("c1"), Type: "int"}, + {Name: formatfn("c2"), Type: "string"}, + }, "it should return the correct columns") + }) + + t.Run("list columns for sql query", func(t *testing.T) { + q := sqlconnect.QueryDef{ + Table: table, + Columns: []string{formatfn("c1")}, + } + stmt := q.ToSQL(db) + + t.Run("with context cancelled", func(t *testing.T) { + _, err := db.ListColumnsForSqlQuery(cancelledCtx, stmt) + require.Error(t, err, "it should not be able to list columns for a sql query with a cancelled context") + }) + + columns, err := db.ListColumnsForSqlQuery(ctx, stmt) + columns = lo.Map(columns, func(col sqlconnect.ColumnRef, _ int) sqlconnect.ColumnRef { + require.NotEmptyf(t, col.RawType, "it should return the raw type for column %q", col.Name) + col.RawType = "" + return col + }) + require.NoError(t, err, "it should be able to list columns for a sql query") + require.Len(t, columns, 1, "it should return the correct number of columns") + require.ElementsMatch(t, columns, []sqlconnect.ColumnRef{ + {Name: 
formatfn("c1"), Type: "int"}, + }, "it should return the correct columns") + }) + + t.Run("count table rows", func(t *testing.T) { + t.Run("with context cancelled", func(t *testing.T) { + _, err := db.CountTableRows(cancelledCtx, table) + require.Error(t, err, "it should not be able to count table rows with a cancelled context") + }) + + count, err := db.CountTableRows(ctx, table) + require.NoError(t, err, "it should be able to count table rows") + require.Equal(t, 0, count, "it should return 0 for a table with no rows") + + // add a row + _, err = db.ExecContext(ctx, fmt.Sprintf("INSERT INTO %s (c1, c2) VALUES (1, '1')", db.QuoteTable(table))) + require.NoError(t, err, "it should be able to insert a row") + + count, err = db.CountTableRows(ctx, table) + require.NoError(t, err, "it should be able to count table rows") + require.Equal(t, 1, count, "it should return 1 for a table with one row") + }) + + t.Run("truncate table", func(t *testing.T) { + t.Run("with context cancelled", func(t *testing.T) { + err := db.TruncateTable(cancelledCtx, table) + require.Error(t, err, "it should not be able to truncate a table with a cancelled context") + }) + + err := db.TruncateTable(ctx, table) + require.NoError(t, err, "it should be able to truncate a table") + count, err := db.CountTableRows(ctx, table) + require.NoError(t, err, "it should be able to count table rows") + require.Equal(t, 0, count, "it should return 0 for a table with no rows") + }) + + t.Run("rename table", func(t *testing.T) { + table := sqlconnect.NewRelationRef(formatfn("test_table_torename"), sqlconnect.WithSchema(schema.Name)) + err := db.CreateTestTable(ctx, table) + require.NoError(t, err, "it should be able to create a test table") + newTable := sqlconnect.NewRelationRef(formatfn("test_table_renamed"), sqlconnect.WithSchema(schema.Name)) + + t.Run("with context cancelled", func(t *testing.T) { + err := db.RenameTable(cancelledCtx, table, newTable) + require.Error(t, err, "it should not be able to 
rename a table with a cancelled context") + }) + + t.Run("using different schemas", func(t *testing.T) { + newTableWithDifferentSchema := newTable + newTableWithDifferentSchema.Schema = newTableWithDifferentSchema.Schema + "_other" + err := db.RenameTable(ctx, table, newTableWithDifferentSchema) + require.Error(t, err, "it should not be able to rename a table to a different schema") + }) + + t.Run("normal operation", func(t *testing.T) { + err := db.RenameTable(ctx, table, newTable) + require.NoError(t, err, "it should be able to rename a table") + + exists, err := db.TableExists(ctx, newTable) + require.NoError(t, err, "it should be able to check if a table exists") + require.True(t, exists, "it should return true for a table that was just renamed") + + exists, err = db.TableExists(ctx, table) + require.NoError(t, err, "it should be able to check if the old table exists") + require.False(t, exists, "it should return false for the old table which was just renamed") + }) + }) + + t.Run("move table", func(t *testing.T) { + table := sqlconnect.NewRelationRef(formatfn("test_table_torename"), sqlconnect.WithSchema(schema.Name)) + err := db.CreateTestTable(ctx, table) + require.NoError(t, err, "it should be able to create a test table") + cols, err := db.ListColumns(ctx, table) + require.NoError(t, err, "it should be able to list columns") + + newTable := sqlconnect.NewRelationRef(formatfn("test_table_moved"), sqlconnect.WithSchema(schema.Name)) + + t.Run("with context cancelled", func(t *testing.T) { + err := db.MoveTable(cancelledCtx, table, newTable) + require.Error(t, err, "it should not be able to move a table with a cancelled context") + }) + + t.Run("using different schemas", func(t *testing.T) { + newTableWithDifferentSchema := newTable + newTableWithDifferentSchema.Schema = newTableWithDifferentSchema.Schema + "_other" + err := db.MoveTable(ctx, table, newTableWithDifferentSchema) + require.Error(t, err, "it should not be able to move a table to a different 
schema") + }) + + t.Run("normal operation", func(t *testing.T) { + err := db.MoveTable(ctx, table, newTable) + require.NoError(t, err, "it should be able to rename a table") + + exists, err := db.TableExists(ctx, newTable) + require.NoError(t, err, "it should be able to check if a table exists") + require.True(t, exists, "it should return true for a table that was just moved") + + newCols, err := db.ListColumns(ctx, newTable) + require.NoError(t, err, "it should be able to list columns") + require.ElementsMatch(t, newCols, cols, "it should return the same columns for the new table") + + exists, err = db.TableExists(ctx, table) + require.NoError(t, err, "it should be able to check if the old table exists") + require.False(t, exists, "it should return false for the old table which was just moved") + }) + }) + + t.Run("drop table", func(t *testing.T) { + table := sqlconnect.NewRelationRef(formatfn("test_table_todrop"), sqlconnect.WithSchema(schema.Name)) + err := db.CreateTestTable(ctx, table) + require.NoError(t, err, "it should be able to create a test table") + + t.Run("with context cancelled", func(t *testing.T) { + err := db.DropTable(cancelledCtx, table) + require.Error(t, err, "it should not be able to drop a table with a cancelled context") + }) + + err = db.DropTable(ctx, table) + require.NoError(t, err, "it should be able to drop a table") + exists, err := db.TableExists(ctx, table) + require.NoError(t, err, "it should be able to check if a table exists") + require.False(t, exists, "it should return false for a table that was just dropped") + }) + + table2 := sqlconnect.NewRelationRef(formatfn("test_table_2"), sqlconnect.WithSchema(schema.Name)) + t.Run("create table from query", func(t *testing.T) { + table := sqlconnect.NewRelationRef(formatfn("test_table_from_query"), sqlconnect.WithSchema(schema.Name)) + t.Run("with context cancelled", func(t *testing.T) { + err := db.CreateTableFromQuery(cancelledCtx, table2, "SELECT 1") + require.Error(t, err, "it 
should not be able to create a table from a query with a cancelled context") + }) + + err := db.CreateTableFromQuery(ctx, table, "SELECT 1 AS numcol") + require.NoError(t, err, "it should be able to create a table from a query") + exists, err := db.TableExists(ctx, table) + require.NoError(t, err, "it should be able to check if a table exists") + require.True(t, exists, "it should return true for a table that was just created from a query") + }) + + t.Run("get row count for query", func(t *testing.T) { + t.Run("with context cancelled", func(t *testing.T) { + _, err := db.GetRowCountForQuery(cancelledCtx, "SELECT 1") + require.Error(t, err, "it should not be able to get row count for a query with a cancelled context") + }) + + count, err := db.GetRowCountForQuery(ctx, "SELECT 2") + require.NoError(t, err, "it should be able to get row count for a query") + require.Equal(t, 2, count, "it should return the correct row count for a query") + }) + }) + + t.Run("column mapping", func(t *testing.T) { + table := sqlconnect.NewRelationRef(formatfn("column_mappings_test"), sqlconnect.WithSchema(schema.Name)) + ExecuteStatements(t, db, schema.Name, "testdata/column-mapping-test-seed.sql") + + expectedColsJSON, err := os.ReadFile("testdata/column-mapping-test-columns.json") + require.NoErrorf(t, err, "it should be able to read the column mappings json file") + var expectedColsMap map[string]string + err = json.Unmarshal(expectedColsJSON, &expectedColsMap) + require.NoErrorf(t, err, "it should be able to unmarshal the column mappings json file") + expectedCols := lo.MapToSlice(expectedColsMap, func(k, v string) sqlconnect.ColumnRef { + return sqlconnect.ColumnRef{Name: k, Type: v} + }) + + exists, err := db.TableExists(ctx, table) + require.NoError(t, err, "it should be able to check if a table exists") + require.True(t, exists, "it should return true for a table that exists") + + selectStmt := sqlconnect.QueryDef{Table: table, OrderBy: &sqlconnect.QueryOrder{Column: 
formatfn("_order"), Order: "ASC"}} + selectSQL := selectStmt.ToSQL(db) + + t.Run("list columns", func(t *testing.T) { + actualCols, err := db.ListColumns(ctx, table) + require.NoError(t, err, "it should be able to list columns") + actualCols = lo.Map(actualCols, func(col sqlconnect.ColumnRef, _ int) sqlconnect.ColumnRef { + require.NotEmptyf(t, col.RawType, "it should return the raw type for column %q", col.Name) + col.RawType = "" + return col + }) + require.ElementsMatch(t, actualCols, expectedCols, "it should return the correct columns") + }) + + t.Run("list columns for sql query", func(t *testing.T) { + actualCols, err := db.ListColumnsForSqlQuery(ctx, selectSQL) + actualCols = lo.Map(actualCols, func(col sqlconnect.ColumnRef, _ int) sqlconnect.ColumnRef { + require.NotEmptyf(t, col.RawType, "it should return the raw type for column %q", col.Name) + col.RawType = "" + return col + }) + require.NoError(t, err, "it should be able to list columns") + require.ElementsMatch(t, actualCols, expectedCols, "it should return the correct columns") + }) + + t.Run("json mapper", func(t *testing.T) { + expectedRowsJSON, err := os.ReadFile("testdata/column-mapping-test-rows.json") + require.NoErrorf(t, err, "it should be able to read the rows json file") + + ch, leave := sqlconnect.QueryJSONAsync(ctx, db, selectSQL) + defer leave() + var rows []any + for row := range ch { + require.NoError(t, row.Err, "it should be able to scan a row") + var o any + err := json.Unmarshal(row.Value, &o) + require.NoError(t, err, "it should be able to unmarshal the row") + rows = append(rows, o) + } + actualRowsJSON, err := json.Marshal(rows) + require.NoError(t, err, "it should be able to marshal the rows") + + require.JSONEq(t, string(expectedRowsJSON), string(actualRowsJSON), "it should return the correct rows: "+string(actualRowsJSON)) + + // verify that the json types are in parity with the column types + cols, err := db.ListColumnsForSqlQuery(ctx, selectSQL) + require.NoError(t, err, "it 
should be able to list columns") + var actualRows []map[string]any + require.NoError(t, json.Unmarshal(actualRowsJSON, &actualRows)) + require.Greater(t, len(actualRows), 0, "it should return at least one row") + actualRow := actualRows[0] + + for _, col := range cols { + switch col.Type { + case "int": + f, ok := actualRow[col.Name].(float64) + require.Truef(t, ok, "column of type int should be parsed as a float64 %q: %v", col.Name, actualRow[col.Name]) + require.Equalf(t, float64(int(f)), f, "column of type int should be an integer %q: %v", col.Name, actualRow[col.Name]) + case "float": + _, ok := actualRow[col.Name].(float64) + require.Truef(t, ok, "column of type float should be parsed as a float64 %q: %v", col.Name, actualRow[col.Name]) + case "string": + _, ok := actualRow[col.Name].(string) + require.Truef(t, ok, "column of type string should be parsed as a string %q: %v", col.Name, actualRow[col.Name]) + case "boolean": + _, ok := actualRow[col.Name].(bool) + require.Truef(t, ok, "column of type boolean should be parsed as a bool %q: %v", col.Name, actualRow[col.Name]) + case "datetime": + datetime, ok := actualRow[col.Name].(string) + require.Truef(t, ok, "column of type datetime should be parsed as a datetime %q: %v", col.Name, actualRow[col.Name]) + _, err := time.Parse(time.RFC3339, datetime) + require.NoErrorf(t, err, "column of type datetime should be a RFC3339 string %q: %v", col.Name, actualRow[col.Name]) + case "json": + // this can be anything + default: + t.Errorf("unexpected column type %s for column %q: %v", col.Type, col.Name, actualRow[col.Name]) + } + } + }) + + t.Run("legacy column and json mapper", func(t *testing.T) { + if !opts.LegacySupport { + t.Skip("legacy column and json mapper test skipped for warehouse " + warehouse) + } + altConfigJSON, err := sjson.SetBytes(configJSON, "useLegacyMappings", true) + require.NoError(t, err, "it should be able to set useLegacyMappings") + legacyDB, err := sqlconnect.NewDB(warehouse, altConfigJSON) + 
require.NoError(t, err, "it should be able to create a new DB") + defer func() { _ = legacyDB.Close() }() + + t.Run("list columns", func(t *testing.T) { + expectedColsJSON, err := os.ReadFile("testdata/legacy-column-mapping-test-columns-table.json") + require.NoErrorf(t, err, "it should be able to read the legacy column mappings json file") + var expectedColsMap map[string]string + err = json.Unmarshal(expectedColsJSON, &expectedColsMap) + require.NoErrorf(t, err, "it should be able to unmarshal the legacy column mappings json file") + expectedCols := lo.MapToSlice(expectedColsMap, func(k, v string) sqlconnect.ColumnRef { + return sqlconnect.ColumnRef{Name: k, Type: v} + }) + + actualCols, err := legacyDB.ListColumns(ctx, table) + require.NoError(t, err, "it should be able to list columns") + actualCols = lo.Map(actualCols, func(col sqlconnect.ColumnRef, _ int) sqlconnect.ColumnRef { + require.NotEmptyf(t, col.RawType, "it should return the raw type for column %q", col.Name) + col.RawType = "" + return col + }) + require.ElementsMatch(t, actualCols, expectedCols, "it should return the correct columns") + }) + + t.Run("list columns for sql query", func(t *testing.T) { + expectedColsJSON, err := os.ReadFile("testdata/legacy-column-mapping-test-columns-sql.json") + require.NoErrorf(t, err, "it should be able to read the legacy column mappings json file") + var expectedColsMap map[string]string + err = json.Unmarshal(expectedColsJSON, &expectedColsMap) + require.NoErrorf(t, err, "it should be able to unmarshal the legacy column mappings json file") + expectedCols := lo.MapToSlice(expectedColsMap, func(k, v string) sqlconnect.ColumnRef { + return sqlconnect.ColumnRef{Name: k, Type: v} + }) + + actualCols, err := legacyDB.ListColumnsForSqlQuery(ctx, selectSQL) + require.NoError(t, err, "it should be able to list columns") + actualCols = lo.Map(actualCols, func(col sqlconnect.ColumnRef, _ int) sqlconnect.ColumnRef { + require.NotEmptyf(t, col.RawType, "it should return 
the raw type for column %q", col.Name) + col.RawType = "" + return col + }) + require.ElementsMatch(t, actualCols, expectedCols, "it should return the correct columns") + }) + + t.Run("json mapper", func(t *testing.T) { + expectedRowsJSON, err := os.ReadFile("testdata/legacy-column-mapping-test-rows.json") + require.NoErrorf(t, err, "it should be able to read the legacy rows json file") + + ch, leave := sqlconnect.QueryJSONAsync(ctx, legacyDB, selectSQL) + defer leave() + var rows []any + for row := range ch { + require.NoError(t, row.Err, "it should be able to scan a row") + var o any + err := json.Unmarshal(row.Value, &o) + require.NoError(t, err, "it should be able to unmarshal the row") + rows = append(rows, o) + } + actualRowsJSON, err := json.Marshal(rows) + require.NoError(t, err, "it should be able to marshal the rows") + + require.JSONEq(t, string(expectedRowsJSON), string(actualRowsJSON), "it should return the correct rows: "+string(actualRowsJSON)) + }) + }) + + t.Run("async query", func(t *testing.T) { + t.Run("QueryJSONMapAsync without error", func(t *testing.T) { + ch, leave := sqlconnect.QueryJSONMapAsync(ctx, db, selectSQL) + defer leave() + for row := range ch { + require.NoError(t, row.Err, "it should be able to scan a row") + } + }) + + t.Run("QueryJSONMapAsync with context cancelled", func(t *testing.T) { + ch, leave := sqlconnect.QueryJSONMapAsync(cancelledCtx, db, selectSQL) + defer leave() + var iterations int + for row := range ch { + iterations++ + require.Error(t, row.Err) + require.True(t, errors.Is(row.Err, context.Canceled)) + } + require.Equal(t, 1, iterations, "it should only iterate once") + }) + + t.Run("QueryJSONMapAsync with leave", func(t *testing.T) { + ch, leave := sqlconnect.QueryJSONMapAsync(cancelledCtx, db, selectSQL) + leave() + time.Sleep(10 * time.Millisecond) + var wg sync.WaitGroup + var iterations int + wg.Add(1) + go func() { + for range ch { + iterations++ + } + wg.Done() + }() + wg.Wait() + require.Equal(t, 0, 
iterations, "it shouldn't iterate after leaving the channel") + }) + }) + }) +} + +func GenerateTestSchema(formatfn func(string) string) string { + return formatfn(fmt.Sprintf("tsqlcon_%s_%d", rand.String(12), time.Now().Unix())) +} + +func ExecuteStatements(t *testing.T, c sqlconnect.DB, schema, path string) { + for _, stmt := range ReadSQLStatements(t, schema, path) { + _, err := c.ExecContext(context.Background(), stmt) + require.NoErrorf(t, err, "it should be able to execute sql statement:\n%s", stmt) + } +} + +func ReadSQLStatements(t *testing.T, schema, path string) []string { + t.Helper() + SQLComment := regexp.MustCompile(`(?m)--.*\n`) + data, err := os.ReadFile(path) + require.NoErrorf(t, err, "it should be able to read the sql script file %q", path) + tpl, err := template.New("data").Parse(string(data)) + require.NoErrorf(t, err, "it should be able to parse the sql script file %q", path) + sql := new(strings.Builder) + templateData := map[string]any{"schema": schema} + err = tpl.Execute(sql, templateData) + require.NoErrorf(t, err, "it should be able to execute the sql script file %q", path) + allStmts := sql.String() + stmts := lo.FilterMap(strings.Split(allStmts, ";"), func(stmt string, _ int) (string, bool) { + stmt = SQLComment.ReplaceAllString(strings.TrimSpace(stmt), "") + return stmt, stmt != "" + }) + return stmts +} diff --git a/sqlconnect/internal/mysql/config.go b/sqlconnect/internal/mysql/config.go new file mode 100644 index 0000000..187baa3 --- /dev/null +++ b/sqlconnect/internal/mysql/config.go @@ -0,0 +1,53 @@ +package mysql + +import ( + "encoding/json" + "fmt" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/util" +) + +type Config struct { + Host string `json:"host"` + Port int `json:"port"` + DBName string `json:"dbname"` + User string `json:"user"` + Password string `json:"password"` + SSLMode string `json:"sslmode"` + + // SkipHostValidation is used to skip host validation during tests + SkipHostValidation bool 
`json:"skipHostValidation"` + // RudderSchema is used to override the default rudder schema name during tests + RudderSchema string `json:"rudderSchema"` + UseLegacyMappings bool `json:"useLegacyMappings"` +} + +func (c Config) ConnectionString() (string, error) { + tls, err := c.TLS() + if err != nil { + return "", fmt.Errorf("error while creating connecton string, %w", err) + } + return fmt.Sprintf("%s:%s@tcp(%s:%d)/%s?tls=%s", c.User, c.Password, c.Host, c.Port, c.DBName, tls), nil +} + +func (c Config) TLS() (string, error) { + switch c.SSLMode { + case "skip-verify", "false": + return c.SSLMode, nil + case "": + return "false", nil + default: + return "", fmt.Errorf("sslmode %s for mysql connection is not supported", c.SSLMode) + } +} + +func (c *Config) Parse(input json.RawMessage) error { + err := json.Unmarshal(input, c) + if err != nil { + return err + } + if !c.SkipHostValidation { + return util.ValidateHost(c.Host) + } + return nil +} diff --git a/sqlconnect/internal/mysql/config_test.go b/sqlconnect/internal/mysql/config_test.go new file mode 100644 index 0000000..82eafe2 --- /dev/null +++ b/sqlconnect/internal/mysql/config_test.go @@ -0,0 +1,51 @@ +package mysql_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/mysql" +) + +func TestConfig(t *testing.T) { + t.Run("host validation", func(t *testing.T) { + var config mysql.Config + err := config.Parse([]byte(`{"host": "localhost"}`)) + require.Error(t, err, "should not allow localhost") + + err = config.Parse([]byte(`{"host": "127.0.0.1"}`)) + require.Error(t, err, "should not allow 127.0.0.1") + + err = config.Parse([]byte(`{"host": "0.0.0.0"}`)) + require.Error(t, err, "should not allow 0.0.0.0") + }) + + t.Run("tls", func(t *testing.T) { + t.Run("empty ssl mode", func(t *testing.T) { + c := mysql.Config{SSLMode: ""} + tls, err := c.TLS() + require.NoError(t, err, "should allow empty tls") + require.Equal(t, "false", tls, 
"should return false") + }) + + t.Run("skip-verify ssl mode", func(t *testing.T) { + c := mysql.Config{SSLMode: "skip-verify"} + tls, err := c.TLS() + require.NoError(t, err, "should allow skip-verify tls") + require.Equal(t, "skip-verify", tls, "should return skip-verify") + }) + t.Run("false ssl mode", func(t *testing.T) { + c := mysql.Config{SSLMode: "false"} + tls, err := c.TLS() + require.NoError(t, err, "should allow false tls") + require.Equal(t, "false", tls, "should return false") + }) + + t.Run("other ssl mode", func(t *testing.T) { + c := mysql.Config{SSLMode: "other"} + _, err := c.TLS() + require.Error(t, err, "should not allow other tls") + }) + }) +} diff --git a/sqlconnect/internal/mysql/db.go b/sqlconnect/internal/mysql/db.go new file mode 100644 index 0000000..0737d57 --- /dev/null +++ b/sqlconnect/internal/mysql/db.go @@ -0,0 +1,79 @@ +package mysql + +import ( + "database/sql" + "encoding/json" + "fmt" + + _ "github.com/go-sql-driver/mysql" // mysql driver + "github.com/samber/lo" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect" + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/base" +) + +const ( + DatabaseType = "mysql" + defaultRudderSchema = "_rudderstack" +) + +// NewDB creates a new postgres-specific client +func NewDB(configJSON json.RawMessage) (*DB, error) { + var config Config + err := config.Parse(configJSON) + if err != nil { + return nil, err + } + + connectionString, err := config.ConnectionString() + if err != nil { + return nil, err + } + db, err := sql.Open(DatabaseType, connectionString) + if err != nil { + return nil, err + } + + return &DB{ + DB: base.NewDB( + db, + lo.Ternary(config.RudderSchema != "", config.RudderSchema, defaultRudderSchema), + base.WithColumnTypeMappings(getColumnTypeMappings(config)), + base.WithJsonRowMapper(getJonRowMapper(config)), + base.WithSQLCommandsOverride(func(cmds base.SQLCommands) base.SQLCommands { + cmds.DropSchema = func(schema string) string { // mysql does not support 
CASCADE + return fmt.Sprintf("DROP SCHEMA %[1]s", schema) + } + cmds.RenameTable = func(schema, oldName, newName string) string { + return fmt.Sprintf("RENAME TABLE %[1]s.%[2]s TO %[1]s.%[3]s", schema, oldName, newName) + } + return cmds + }), + base.WithDialect(dialect{}), + ), + }, nil +} + +func init() { + sqlconnect.RegisterDBFactory(DatabaseType, func(credentialsJSON json.RawMessage) (sqlconnect.DB, error) { + return NewDB(credentialsJSON) + }) +} + +type DB struct { + *base.DB +} + +func getColumnTypeMappings(config Config) map[string]string { + if config.UseLegacyMappings { + return nil + } + return columnTypeMappings +} + +func getJonRowMapper(config Config) func(databaseTypeName string, value any) any { + if config.UseLegacyMappings { + return legacyJsonRowMapper + } + return jsonRowMapper +} diff --git a/sqlconnect/internal/mysql/dialect.go b/sqlconnect/internal/mysql/dialect.go new file mode 100644 index 0000000..6ff3a02 --- /dev/null +++ b/sqlconnect/internal/mysql/dialect.go @@ -0,0 +1,27 @@ +package mysql + +import ( + "strings" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect" +) + +type dialect struct{} + +// QuoteTable quotes a table name +func (d dialect) QuoteTable(table sqlconnect.RelationRef) string { + if table.Schema != "" { + return d.QuoteIdentifier(table.Schema) + "." + d.QuoteIdentifier(table.Name) + } + return d.QuoteIdentifier(table.Name) +} + +// QuoteIdentifier quotes an identifier, e.g. 
a column name +func (d dialect) QuoteIdentifier(name string) string { + return "`" + name + "`" +} + +// FormatTableName formats a table name, typically by lower or upper casing it, depending on the database +func (d dialect) FormatTableName(name string) string { + return strings.ToLower(name) +} diff --git a/sqlconnect/internal/mysql/dialect_test.go b/sqlconnect/internal/mysql/dialect_test.go new file mode 100644 index 0000000..28ffc84 --- /dev/null +++ b/sqlconnect/internal/mysql/dialect_test.go @@ -0,0 +1,30 @@ +package mysql + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect" +) + +func TestDialect(t *testing.T) { + var d dialect + t.Run("format table", func(t *testing.T) { + formatted := d.FormatTableName("TaBle") + require.Equal(t, "table", formatted, "table name should be lowercased") + }) + + t.Run("quote identifier", func(t *testing.T) { + quoted := d.QuoteIdentifier("column") + require.Equal(t, "`column`", quoted, "column name should be quoted with backticks") + }) + + t.Run("quote table", func(t *testing.T) { + quoted := d.QuoteTable(sqlconnect.NewRelationRef("table")) + require.Equal(t, "`table`", quoted, "table name should be quoted with backticks") + + quoted = d.QuoteTable(sqlconnect.NewRelationRef("table", sqlconnect.WithSchema("schema"))) + require.Equal(t, "`schema`.`table`", quoted, "schema and table name should be quoted with backticks") + }) +} diff --git a/sqlconnect/internal/mysql/integration_test.go b/sqlconnect/internal/mysql/integration_test.go new file mode 100644 index 0000000..9e55a92 --- /dev/null +++ b/sqlconnect/internal/mysql/integration_test.go @@ -0,0 +1,38 @@ +package mysql_test + +import ( + "encoding/json" + "strconv" + "strings" + "testing" + + "github.com/ory/dockertest/v3" + "github.com/stretchr/testify/require" + + mysqlresource "github.com/rudderlabs/rudder-go-kit/testhelper/docker/resource/mysql" + integrationtest 
"github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/integration_test" + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/mysql" +) + +func TestMysqlDB(t *testing.T) { + pool, err := dockertest.NewPool("") + require.NoError(t, err, "it should be able to create a docker pool") + + mysqlResource, err := mysqlresource.Setup(pool, t) + require.NoError(t, err, "it should be able to create a postgres resource") + port, err := strconv.Atoi(mysqlResource.Port) + require.NoError(t, err, "it should be able to convert postgres port to int") + config := mysql.Config{ + Host: mysqlResource.Host, + Port: port, + DBName: mysqlResource.Database, + User: mysqlResource.User, + Password: mysqlResource.Password, + SSLMode: "false", + SkipHostValidation: true, + } + configJSON, err := json.Marshal(config) + require.NoError(t, err, "it should be able to marshal config to json") + + integrationtest.TestDatabaseScenarios(t, mysql.DatabaseType, configJSON, strings.ToLower, integrationtest.Options{LegacySupport: true}) +} diff --git a/sqlconnect/internal/mysql/legacy_mappings.go b/sqlconnect/internal/mysql/legacy_mappings.go new file mode 100644 index 0000000..7369752 --- /dev/null +++ b/sqlconnect/internal/mysql/legacy_mappings.go @@ -0,0 +1,63 @@ +package mysql + +import ( + "encoding/json" + "strconv" + "time" +) + +// legacyJsonRowMapper maps a row's scanned column to a json object's field +func legacyJsonRowMapper(databaseTypeName string, value any) any { + if value == nil { + return nil + } + switch databaseTypeName { + case "CHAR", "VARCHAR", "BLOB", "TEXT", "TINYBLOB", "TINYTEXT", "MEDIUMBLOB", "MEDIUMTEXT", "LONGBLOB", "LONGTEXT", "ENUM": + switch v := value.(type) { + case []uint8: + return string(v) + default: + return json.RawMessage(value.(string)) + } + case "DATE", "DATETIME", "TIMESTAMP", "TIME", "YEAR": + switch v := value.(type) { + case []uint8: + return string(v) + default: + return value.(time.Time) + } + + case "FLOAT", "DOUBLE", "DECIMAL": + switch v := 
value.(type) { + case []uint8: + n, err := strconv.ParseFloat(string(v), 64) + if err != nil { + panic(err) + } + return n + default: + n, err := strconv.ParseFloat(value.(string), 64) + if err != nil { + panic(err) + } + return n + } + case "INT", "TINYINT", "SMALLINt", "MEDIUMINT", "BIGINT": + switch v := value.(type) { + case []uint8: + n, err := strconv.ParseInt(string(v), 10, 64) + if err != nil { + panic(err) + } + return n + default: + n, err := strconv.ParseInt(value.(string), 10, 64) + if err != nil { + panic(err) + } + return n + } + } + + return value +} diff --git a/sqlconnect/internal/mysql/mappings.go b/sqlconnect/internal/mysql/mappings.go new file mode 100644 index 0000000..7b01fac --- /dev/null +++ b/sqlconnect/internal/mysql/mappings.go @@ -0,0 +1,104 @@ +package mysql + +import ( + "encoding/binary" + "encoding/json" + "strconv" + "time" +) + +// mapping of database column types to rudder types +var columnTypeMappings = map[string]string{ + "INTEGER": "int", + "INT": "int", + "TINYINT": "int", + "SMALLINT": "int", + "MEDIUMINT": "int", + "BIGINT": "int", + "DECIMAL": "float", + "NUMERIC": "float", + "FLOAT": "float", + "DOUBLE": "float", + "BIT": "int", + "CHAR": "string", + "VARCHAR": "string", + "BINARY": "string", + "VARBINARY": "string", + "BLOB": "string", + "TINYBLOB": "string", + "MEDIUMBLOB": "string", + "LONGBLOB": "string", + "TEXT": "string", + "TINYTEXT": "string", + "MEDIUMTEXT": "string", + "LONGTEXT": "string", + "ENUM": "string", + "SET": "string", + "DATE": "datetime", + "DATETIME": "datetime", + "TIMESTAMP": "datetime", + "TIME": "datetime", + "YEAR": "datetime", + "JSON": "json", +} + +func jsonRowMapper(databaseTypeName string, value interface{}) interface{} { + if value == nil { + return nil + } + + var stringValue string + switch v := value.(type) { + case []uint8: + stringValue = string(v) + case time.Time: + stringValue = v.String() + case string: + stringValue = v + } + + switch databaseTypeName { + case "CHAR", 
"VARCHAR", "BLOB", "TEXT", "TINYBLOB", "TINYTEXT", "MEDIUMBLOB", "MEDIUMTEXT", "LONGBLOB", "LONGTEXT", + "ENUM", "BINARY", "VARBINARY", + "SET": + return stringValue + case "TIME": + if p, err := time.Parse("15:04:05", stringValue); err == nil { + return p + } + return stringValue + case "DATE": + if p, err := time.Parse("2006-01-02", stringValue); err == nil { + return p + } + return stringValue + case "DATETIME", "TIMESTAMP": + if p, err := time.Parse("2006-01-02 15:04:05", stringValue); err == nil { + return p + } + return stringValue + case "YEAR": + if p, err := time.Parse("2006", stringValue); err == nil { + return p + } + return stringValue + case "JSON": + return json.RawMessage(stringValue) + case "FLOAT", "DOUBLE", "DECIMAL": + n, err := strconv.ParseFloat(stringValue, 64) + if err != nil { + panic(err) + } + return n + case "INT", "TINYINT", "SMALLINT", "MEDIUMINT", "BIGINT": + n, err := strconv.ParseInt(stringValue, 10, 64) + if err != nil { + panic(err) + } + return n + case "BIT": + n := binary.BigEndian.Uint64([]byte(stringValue)) + return n + } + return value +} diff --git a/sqlconnect/internal/mysql/testdata/column-mapping-test-columns.json b/sqlconnect/internal/mysql/testdata/column-mapping-test-columns.json new file mode 100644 index 0000000..673101b --- /dev/null +++ b/sqlconnect/internal/mysql/testdata/column-mapping-test-columns.json @@ -0,0 +1,34 @@ +{ + "_order": "int", + "_integer": "int", + "_int": "int", + "_tinyint": "int", + "_smallint": "int", + "_mediumint": "int", + "_bigint": "int", + "_decimal": "float", + "_numeric": "float", + "_float": "float", + "_double": "float", + "_bit": "int", + "_char": "string", + "_varchar": "string", + "_bin": "string", + "_varbinary": "string", + "_blob": "string", + "_tinyblob": "string", + "_mediumblob": "string", + "_longblob": "string", + "_text": "string", + "_tinytext": "string", + "_mediumtext": "string", + "_longtext": "string", + "_enum": "string", + "_set": "string", + "_date": "datetime", + 
"_datetime": "datetime", + "_timestamp": "datetime", + "_time": "datetime", + "_year": "datetime", + "_json": "json" +} diff --git a/sqlconnect/internal/mysql/testdata/column-mapping-test-rows.json b/sqlconnect/internal/mysql/testdata/column-mapping-test-rows.json new file mode 100644 index 0000000..801b199 --- /dev/null +++ b/sqlconnect/internal/mysql/testdata/column-mapping-test-rows.json @@ -0,0 +1,104 @@ +[ + { + "_order": 1, + "_integer": 1, + "_int": 1, + "_tinyint": 1, + "_smallint": 1, + "_mediumint": 1, + "_bigint": 1, + "_decimal": 1.1, + "_numeric": 1.1, + "_float": 1.1, + "_double": 1.1, + "_bit": 3047, + "_char": "a", + "_varchar": "abc", + "_bin": "a\u0000\u0000\u0000\u0000\u0000\u0000\u0000", + "_varbinary": "a", + "_blob": "b", + "_tinyblob": "b", + "_mediumblob": "b", + "_longblob": "b", + "_text": "t", + "_tinytext": "t", + "_mediumtext": "t", + "_longtext": "t", + "_enum": "1", + "_set": "one,two", + "_date": "2020-01-01T00:00:00Z", + "_datetime": "2020-01-01T15:10:10Z", + "_timestamp": "2020-01-01T15:10:10Z", + "_time": "0000-01-01T10:45:15Z", + "_year": "2020-01-01T00:00:00Z", + "_json": {"key": "value"} + }, + { + "_order": 2, + "_integer": 0, + "_int": 0, + "_tinyint": 0, + "_smallint": 0, + "_mediumint": 0, + "_bigint": 0, + "_decimal": 0, + "_numeric": 0, + "_float": 0, + "_double": 0, + "_bit": 0, + "_char": "", + "_varchar": "", + "_bin": "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000", + "_varbinary": "", + "_blob": "", + "_tinyblob": "", + "_mediumblob": "", + "_longblob": "", + "_text": "", + "_tinytext": "", + "_mediumtext": "", + "_longtext": "", + "_enum": "1", + "_set": "", + "_date": "2020-01-01T00:00:00Z", + "_datetime": "2020-01-01T15:10:10Z", + "_timestamp": "2020-01-01T15:10:10Z", + "_time": "0000-01-01T10:45:15Z", + "_year": "2020-01-01T00:00:00Z", + "_json": {} + }, + { + "_order": 3, + "_integer": null, + "_int": null, + "_tinyint": null, + "_smallint": null, + "_mediumint": null, + "_bigint": null, + "_decimal": null, + 
"_numeric": null, + "_float": null, + "_double": null, + "_bit": null, + "_char": null, + "_varchar": null, + "_bin": null, + "_varbinary": null, + "_blob": null, + "_tinyblob": null, + "_mediumblob": null, + "_longblob": null, + "_text": null, + "_tinytext": null, + "_mediumtext": null, + "_longtext": null, + "_enum": null, + "_set": null, + "_date": null, + "_datetime": null, + "_timestamp": null, + "_time": null, + "_year": null, + "_json": null + } +] \ No newline at end of file diff --git a/sqlconnect/internal/mysql/testdata/column-mapping-test-seed.sql b/sqlconnect/internal/mysql/testdata/column-mapping-test-seed.sql new file mode 100644 index 0000000..8cf40f5 --- /dev/null +++ b/sqlconnect/internal/mysql/testdata/column-mapping-test-seed.sql @@ -0,0 +1,41 @@ +CREATE TABLE `{{.schema}}`.`column_mappings_test` ( + _order INT, + _integer INTEGER, + _int INT, + _tinyint TINYINT, + _smallint SMALLINT, + _mediumint MEDIUMINT, + _bigint BIGINT, + _decimal DECIMAL(5,2), + _numeric NUMERIC(5,2), + _float FLOAT, + _double DOUBLE, + _bit BIT(64), + _char CHAR(1), + _varchar VARCHAR(10), + _bin BINARY(8), + _varbinary VARBINARY(100), + _blob BLOB, + _tinyblob TINYBLOB, + _mediumblob MEDIUMBLOB, + _longblob LONGBLOB, + _text TEXT(10), + _tinytext TINYTEXT, + _mediumtext MEDIUMTEXT, + _longtext LONGTEXT, + _enum ENUM('1', '2', '3'), + _set SET('one', 'two', 'three'), + _date DATE, + _datetime DATETIME, + _timestamp TIMESTAMP, + _time TIME, + _year YEAR, + _json JSON +); + +INSERT INTO `{{.schema}}`.`column_mappings_test` + (_order, _integer, _int, _tinyint, _smallint, _mediumint, _bigint, _decimal, _numeric, _float, _double, _bit, _char, _varchar, _bin, _varbinary, _blob, _tinyblob, _mediumblob, _longblob, _text, _tinytext, _mediumtext, _longtext, _enum, _set, _date, _datetime, _timestamp, _time, _year, _json) +VALUES + (1, 1, 1, 1, 1, 1, 1, 1.1, 1.1, 1.1, 1.1, b'101111100111', 'a', 'abc', 'a', 'a', 'b', 'b', 'b', 'b', 't', 't', 't', 't', '1', 'one,two', '2020-01-01', 
'2020-01-01 15:10:10', '2020-01-01 15:10:10', '10:45:15', '2020', '{"key": "value"}'), + (2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, '', '', '', '', '', '', '', '', '', '', '', '', '1', '', '2020-01-01', '2020-01-01 15:10:10', '2020-01-01 15:10:10', '10:45:15', '2020', '{}' ), + (3, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ); \ No newline at end of file diff --git a/sqlconnect/internal/mysql/testdata/legacy-column-mapping-test-columns-sql.json b/sqlconnect/internal/mysql/testdata/legacy-column-mapping-test-columns-sql.json new file mode 100644 index 0000000..a3312f3 --- /dev/null +++ b/sqlconnect/internal/mysql/testdata/legacy-column-mapping-test-columns-sql.json @@ -0,0 +1,34 @@ +{ + "_order": "INT", + "_integer": "INT", + "_int": "INT", + "_tinyint": "TINYINT", + "_smallint": "SMALLINT", + "_mediumint": "MEDIUMINT", + "_bigint": "BIGINT", + "_decimal": "DECIMAL", + "_numeric": "DECIMAL", + "_float": "FLOAT", + "_double": "DOUBLE", + "_bit": "BIT", + "_char": "CHAR", + "_varchar": "VARCHAR", + "_bin": "BINARY", + "_varbinary": "VARBINARY", + "_blob": "BLOB", + "_tinyblob": "BLOB", + "_mediumblob": "BLOB", + "_longblob": "BLOB", + "_text": "TEXT", + "_tinytext": "TEXT", + "_mediumtext": "TEXT", + "_longtext": "TEXT", + "_enum": "CHAR", + "_set": "CHAR", + "_date": "DATE", + "_datetime": "DATETIME", + "_timestamp": "TIMESTAMP", + "_time": "TIME", + "_year": "YEAR", + "_json": "JSON" +} diff --git a/sqlconnect/internal/mysql/testdata/legacy-column-mapping-test-columns-table.json b/sqlconnect/internal/mysql/testdata/legacy-column-mapping-test-columns-table.json new file mode 100644 index 0000000..0be888e --- /dev/null +++ b/sqlconnect/internal/mysql/testdata/legacy-column-mapping-test-columns-table.json @@ -0,0 +1,34 @@ +{ + "_order": "int", + "_integer": "int", + "_int": "int", + "_tinyint": "tinyint", + "_smallint": "smallint", + 
"_mediumint": "mediumint", + "_bigint": "bigint", + "_decimal": "decimal", + "_numeric": "decimal", + "_float": "float", + "_double": "double", + "_bit": "bit", + "_char": "char", + "_varchar": "varchar", + "_bin": "binary", + "_varbinary": "varbinary", + "_blob": "blob", + "_tinyblob": "tinyblob", + "_mediumblob": "mediumblob", + "_longblob": "longblob", + "_text": "tinytext", + "_tinytext": "tinytext", + "_mediumtext": "mediumtext", + "_longtext": "longtext", + "_enum": "enum", + "_set": "set", + "_date": "date", + "_datetime": "datetime", + "_timestamp": "timestamp", + "_time": "time", + "_year": "year", + "_json": "json" +} diff --git a/sqlconnect/internal/mysql/testdata/legacy-column-mapping-test-rows.json b/sqlconnect/internal/mysql/testdata/legacy-column-mapping-test-rows.json new file mode 100644 index 0000000..d68f178 --- /dev/null +++ b/sqlconnect/internal/mysql/testdata/legacy-column-mapping-test-rows.json @@ -0,0 +1,104 @@ +[ + { + "_order": 1, + "_integer": 1, + "_int": 1, + "_tinyint": 1, + "_smallint": "MQ==", + "_mediumint": 1, + "_bigint": 1, + "_decimal": 1.1, + "_numeric": 1.1, + "_float": 1.1, + "_double": 1.1, + "_bit": "AAAAAAAAC+c=", + "_char": "a", + "_varchar": "abc", + "_bin": "YQAAAAAAAAA=", + "_varbinary": "YQ==", + "_blob": "b", + "_tinyblob": "b", + "_mediumblob": "b", + "_longblob": "b", + "_text": "t", + "_tinytext": "t", + "_mediumtext": "t", + "_longtext": "t", + "_enum": "1", + "_set": "one,two", + "_date": "2020-01-01", + "_datetime": "2020-01-01 15:10:10", + "_timestamp": "2020-01-01 15:10:10", + "_time": "10:45:15", + "_year": "2020", + "_json": "eyJrZXkiOiAidmFsdWUifQ==" + }, + { + "_order": 2, + "_integer": 0, + "_int": 0, + "_tinyint": 0, + "_smallint": "MA==", + "_mediumint": 0, + "_bigint": 0, + "_decimal": 0, + "_numeric": 0, + "_float": 0, + "_double": 0, + "_bit": "AAAAAAAAAAA=", + "_char": "", + "_varchar": "", + "_bin": "AAAAAAAAAAA=", + "_varbinary": "", + "_blob": "", + "_tinyblob": "", + "_mediumblob": "", + 
"_longblob": "", + "_text": "", + "_tinytext": "", + "_mediumtext": "", + "_longtext": "", + "_enum": "1", + "_set": "", + "_date": "2020-01-01", + "_datetime": "2020-01-01 15:10:10", + "_timestamp": "2020-01-01 15:10:10", + "_time": "10:45:15", + "_year": "2020", + "_json": "e30=" + }, + { + "_order": 3, + "_integer": null, + "_int": null, + "_tinyint": null, + "_smallint": null, + "_mediumint": null, + "_bigint": null, + "_decimal": null, + "_numeric": null, + "_float": null, + "_double": null, + "_bit": null, + "_char": null, + "_varchar": null, + "_bin": null, + "_varbinary": null, + "_blob": null, + "_tinyblob": null, + "_mediumblob": null, + "_longblob": null, + "_text": null, + "_tinytext": null, + "_mediumtext": null, + "_longtext": null, + "_enum": null, + "_set": null, + "_date": null, + "_datetime": null, + "_timestamp": null, + "_time": null, + "_year": null, + "_json": null + } +] \ No newline at end of file diff --git a/sqlconnect/internal/postgres/config.go b/sqlconnect/internal/postgres/config.go new file mode 100644 index 0000000..a0e6e07 --- /dev/null +++ b/sqlconnect/internal/postgres/config.go @@ -0,0 +1,46 @@ +package postgres + +import ( + "encoding/json" + "fmt" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/util" +) + +// Config used to connect to SQL Database +type Config struct { + Host string `json:"host"` + Port int `json:"port"` + DBName string `json:"dbname"` + User string `json:"user"` + Password string `json:"password"` + SSLMode string `json:"sslmode"` + + // SkipHostValidation is used to skip host validation during tests + SkipHostValidation bool `json:"skipHostValidation"` + // RudderSchema is used to override the default rudder schema name during tests + RudderSchema string `json:"rudderSchema"` + UseLegacyMappings bool `json:"useLegacyMappings"` +} + +func (c Config) ConnectionString() string { + if c.Port == 0 { + c.Port = 5432 + } + sslMode := "disable" + if c.SSLMode != "" { + sslMode = c.SSLMode + } + return 
fmt.Sprintf("host=%s port=%d dbname=%s user=%s password=%s sslmode=%s", c.Host, c.Port, c.DBName, c.User, c.Password, sslMode) +} + +func (c *Config) Parse(input json.RawMessage) error { + err := json.Unmarshal(input, c) + if err != nil { + return err + } + if !c.SkipHostValidation { + return util.ValidateHost(c.Host) + } + return nil +} diff --git a/sqlconnect/internal/postgres/db.go b/sqlconnect/internal/postgres/db.go new file mode 100644 index 0000000..53f7749 --- /dev/null +++ b/sqlconnect/internal/postgres/db.go @@ -0,0 +1,64 @@ +package postgres + +import ( + "database/sql" + "encoding/json" + + _ "github.com/lib/pq" // postgres driver + "github.com/samber/lo" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect" + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/base" +) + +const ( + DatabaseType = "postgres" + defaultRudderSchema = "_rudderstack" +) + +// NewDB creates a new postgres-specific client +func NewDB(credentialsJSON json.RawMessage) (*DB, error) { + var config Config + err := config.Parse(credentialsJSON) + if err != nil { + return nil, err + } + + db, err := sql.Open(DatabaseType, config.ConnectionString()) + if err != nil { + return nil, err + } + + return &DB{ + DB: base.NewDB( + db, + lo.Ternary(config.RudderSchema != "", config.RudderSchema, defaultRudderSchema), + base.WithColumnTypeMappings(getColumnTypeMappings(config)), + base.WithJsonRowMapper(getJonRowMapper(config)), + ), + }, nil +} + +func init() { + sqlconnect.RegisterDBFactory(DatabaseType, func(credentialsJSON json.RawMessage) (sqlconnect.DB, error) { + return NewDB(credentialsJSON) + }) +} + +type DB struct { + *base.DB +} + +func getColumnTypeMappings(config Config) map[string]string { + if config.UseLegacyMappings { + return legacyColumnTypeMappings + } + return columnTypeMappings +} + +func getJonRowMapper(config Config) func(databaseTypeName string, value any) any { + if config.UseLegacyMappings { + return legacyJsonRowMapper + } + return jsonRowMapper +} diff --git 
a/sqlconnect/internal/postgres/integration_test.go b/sqlconnect/internal/postgres/integration_test.go new file mode 100644 index 0000000..3e6db9a --- /dev/null +++ b/sqlconnect/internal/postgres/integration_test.go @@ -0,0 +1,38 @@ +package postgres_test + +import ( + "encoding/json" + "strconv" + "strings" + "testing" + + "github.com/ory/dockertest/v3" + "github.com/stretchr/testify/require" + + pgresource "github.com/rudderlabs/rudder-go-kit/testhelper/docker/resource/postgres" + integrationtest "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/integration_test" + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/postgres" +) + +func TestPostgresDB(t *testing.T) { + pool, err := dockertest.NewPool("") + require.NoError(t, err, "it should be able to create a docker pool") + + postgresResource, err := pgresource.Setup(pool, t) + require.NoError(t, err, "it should be able to create a postgres resource") + port, err := strconv.Atoi(postgresResource.Port) + require.NoError(t, err, "it should be able to convert postgres port to int") + config := postgres.Config{ + Host: postgresResource.Host, + Port: port, + DBName: postgresResource.Database, + User: postgresResource.User, + Password: postgresResource.Password, + SSLMode: "disable", + SkipHostValidation: true, + } + configJSON, err := json.Marshal(config) + require.NoError(t, err, "it should be able to marshal config to json") + + integrationtest.TestDatabaseScenarios(t, postgres.DatabaseType, configJSON, strings.ToLower, integrationtest.Options{LegacySupport: true}) +} diff --git a/sqlconnect/internal/postgres/legacy_mappings.go b/sqlconnect/internal/postgres/legacy_mappings.go new file mode 100644 index 0000000..a2c92e5 --- /dev/null +++ b/sqlconnect/internal/postgres/legacy_mappings.go @@ -0,0 +1,56 @@ +package postgres + +import "encoding/json" + +var legacyColumnTypeMappings = map[string]string{ + "int": "int", + "int2": "int", + "int4": "int", + "int8": "int", + "integer": "int", + "smallint": 
"int", + "bigint": "int", + "real": "float", + "float": "float", + "float4": "float", + "float8": "float", + "numeric": "float", + "double precision": "float", + "text": "string", + "varchar": "string", + "character varying": "string", + "nchar": "string", + "bpchar": "string", + "character": "string", + "nvarchar": "string", + "string": "string", + "timestamptz": "datetime", + "timestamp without time zone": "datetime", + "timestamp with time zone": "datetime", + "timestamp": "datetime", + "boolean": "boolean", + "bool": "boolean", + "jsonb": "json", +} + +// legacyJsonRowMapper maps a row's scanned column to a json object's field +func legacyJsonRowMapper(databaseTypeName string, value any) any { + switch databaseTypeName { + case "JSON": + fallthrough + case "JSONB": + switch v := value.(type) { + case []byte: + return json.RawMessage(v) + + case string: + return json.RawMessage(v) + } + default: + switch v := value.(type) { + case []byte: + return string(v) + } + } + return value +} diff --git a/sqlconnect/internal/postgres/mappings.go b/sqlconnect/internal/postgres/mappings.go new file mode 100644 index 0000000..1aff175 --- /dev/null +++ b/sqlconnect/internal/postgres/mappings.go @@ -0,0 +1,66 @@ +package postgres + +import ( + "encoding/json" + "strconv" +) + +// mapping of database column types to rudder types +var columnTypeMappings = map[string]string{ + "int": "int", + "int2": "int", + "int4": "int", + "int8": "int", + "integer": "int", + "smallint": "int", + "bigint": "int", + "real": "float", + "float": "float", + "float4": "float", + "float8": "float", + "numeric": "float", + "double precision": "float", + "text": "string", + "varchar": "string", + "character varying": "string", + "nchar": "string", + "bpchar": "string", + "character": "string", + "nvarchar": "string", + "string": "string", + "timestamptz": "datetime", + "timestamp without time zone": "datetime", + "timestamp with time zone": "datetime", + "timestamp": "datetime", + "boolean": 
"boolean", + "bool": "boolean", + "json": "json", + "jsonb": "json", +} + +// jsonRowMapper maps a row's scanned column to a json object's field +func jsonRowMapper(databaseTypeName string, value any) any { + switch databaseTypeName { + case "JSON", "JSONB": + switch v := value.(type) { + case []byte: + return json.RawMessage(v) + case string: + return json.RawMessage(v) + } + case "NUMERIC": + switch v := value.(type) { + case []byte: + if n, err := strconv.ParseFloat(string(v), 64); err == nil { + return n + } + } + default: + switch v := value.(type) { + case []byte: + return string(v) + } + } + + return value +} diff --git a/sqlconnect/internal/postgres/testdata/column-mapping-test-columns.json b/sqlconnect/internal/postgres/testdata/column-mapping-test-columns.json new file mode 100644 index 0000000..7a8e21e --- /dev/null +++ b/sqlconnect/internal/postgres/testdata/column-mapping-test-columns.json @@ -0,0 +1,30 @@ +{ + "_order": "int", + "_int": "int", + "_int2": "int", + "_int4": "int", + "_int8": "int", + "_integer": "int", + "_smallint": "int", + "_bigint": "int", + "_real": "float", + "_float": "float", + "_float4": "float", + "_float8": "float", + "_numeric": "float", + "_double": "float", + "_text": "string", + "_varchar": "string", + "_charvar": "string", + "_nchar": "string", + "_bpchar": "string", + "_character": "string", + "_timestamptz": "datetime", + "_timestampntz": "datetime", + "_timestampwtz": "datetime", + "_timestamp": "datetime", + "_boolean": "boolean", + "_bool": "boolean", + "_json": "json", + "_jsonb": "json" +} \ No newline at end of file diff --git a/sqlconnect/internal/postgres/testdata/column-mapping-test-rows.json b/sqlconnect/internal/postgres/testdata/column-mapping-test-rows.json new file mode 100644 index 0000000..14bc4fd --- /dev/null +++ b/sqlconnect/internal/postgres/testdata/column-mapping-test-rows.json @@ -0,0 +1,96 @@ +[ + { + "_order": 1, + "_int": 1, + "_int2": 1, + "_int4": 1, + "_int8": 1, + "_integer": 1, + 
"_smallint": 1, + "_bigint": 1, + "_real": 1.1, + "_float": 1.1, + "_float4": 1.1, + "_float8": 1.1, + "_numeric": 1.1, + "_double": 1.1, + "_text": "abc", + "_varchar": "abc", + "_charvar": "abc", + "_nchar": "abc ", + "_bpchar": "abc", + "_character": "abc ", + "_timestamptz": "2004-10-19T08:23:54Z", + "_timestampntz": "2004-10-19T10:23:54Z", + "_timestampwtz": "2004-10-19T08:23:54Z", + "_timestamp": "2004-10-19T10:23:54Z", + "_boolean": true, + "_bool": true, + "_json": { + "a": 1 + }, + "_jsonb": { + "a": 1 + } + }, + { + "_order": 2, + "_int": 0, + "_int2": 0, + "_int4": 0, + "_int8": 0, + "_integer": 0, + "_smallint": 0, + "_bigint": 0, + "_real": 0, + "_float": 0, + "_float4": 0, + "_float8": 0, + "_numeric": 0, + "_double": 0, + "_text": "", + "_varchar": "", + "_charvar": "", + "_nchar": " ", + "_bpchar": "", + "_character": " ", + "_timestamptz": "2004-10-19T08:23:54Z", + "_timestampntz": "2004-10-19T10:23:54Z", + "_timestampwtz": "2004-10-19T08:23:54Z", + "_timestamp": "2004-10-19T10:23:54Z", + "_boolean": false, + "_bool": false, + "_json": {}, + "_jsonb": {} + }, + { + "_order": 3, + "_int": null, + "_int2": null, + "_int4": null, + "_int8": null, + "_integer": null, + "_smallint": null, + "_bigint": null, + "_real": null, + "_float": null, + "_float4": null, + "_float8": null, + "_numeric": null, + "_double": null, + "_text": null, + "_varchar": null, + "_charvar": null, + "_nchar": null, + "_bpchar": null, + "_character": null, + "_timestamptz": null, + "_timestampntz": null, + "_timestampwtz": null, + "_timestamp": null, + "_boolean": null, + "_bool": null, + "_json": null, + "_jsonb": null + } +] \ No newline at end of file diff --git a/sqlconnect/internal/postgres/testdata/column-mapping-test-seed.sql b/sqlconnect/internal/postgres/testdata/column-mapping-test-seed.sql new file mode 100644 index 0000000..3c88142 --- /dev/null +++ b/sqlconnect/internal/postgres/testdata/column-mapping-test-seed.sql @@ -0,0 +1,37 @@ +CREATE TABLE 
"{{.schema}}"."column_mappings_test" ( + _order INT, + _int INT, + _int2 INT2, + _int4 INT4, + _int8 INT8, + _integer INTEGER, + _smallint SMALLINT, + _bigint BIGINT, + _real REAL, + _float FLOAT, + _float4 FLOAT4, + _float8 FLOAT8, + _numeric NUMERIC(10,2), + _double DOUBLE PRECISION, + _text TEXT, + _varchar VARCHAR(10), + _charvar CHARACTER VARYING, + _nchar NCHAR(10), + _bpchar BPCHAR, + _character CHARACTER(10), + _timestamptz TIMESTAMPTZ, + _timestampntz TIMESTAMP WITHOUT TIME ZONE, + _timestampwtz TIMESTAMP WITH TIME ZONE, + _timestamp TIMESTAMP, + _boolean BOOLEAN, + _bool BOOL, + _json JSON, + _jsonb JSONB +); + +INSERT INTO "{{.schema}}"."column_mappings_test" + (_order, _int, _int2, _int4, _int8, _integer, _smallint, _bigint, _real, _float, _float4, _float8, _numeric, _double, _text, _varchar, _charvar, _nchar, _bpchar, _character, _timestamptz, _timestampntz, _timestampwtz, _timestamp, _boolean, _bool, _json, _jsonb) +VALUES + (1, 1, 1, 1, 1, 1, 1, 1, 1.1, 1.1, 1.1, 1.1, 1.1, 1.1, 'abc', 'abc', 'abc', 'abc', 'abc', 'abc', '2004-10-19 10:23:54+02', '2004-10-19 10:23:54', '2004-10-19 10:23:54+02', '2004-10-19 10:23:54+02', true, true, '{"a": 1}', '{"a": 1}'), + (2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, '', '', '', '', '', '', '2004-10-19 10:23:54+02', '2004-10-19 10:23:54', '2004-10-19 10:23:54+02', '2004-10-19 10:23:54+02', false, false, '{}', '{}' ), + (3, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ); \ No newline at end of file diff --git a/sqlconnect/internal/postgres/testdata/legacy-column-mapping-test-columns-sql.json b/sqlconnect/internal/postgres/testdata/legacy-column-mapping-test-columns-sql.json new file mode 100644 index 0000000..71e3ef1 --- /dev/null +++ b/sqlconnect/internal/postgres/testdata/legacy-column-mapping-test-columns-sql.json @@ -0,0 +1,30 @@ +{ + "_order": "int", + "_int": "int", + "_int2": "int", + "_int4": 
"int", + "_int8": "int", + "_integer": "int", + "_smallint": "int", + "_bigint": "int", + "_real": "float", + "_float": "float", + "_float4": "float", + "_float8": "float", + "_numeric": "float", + "_double": "float", + "_text": "string", + "_varchar": "string", + "_charvar": "string", + "_nchar": "string", + "_bpchar": "string", + "_character": "string", + "_timestamptz": "datetime", + "_timestampntz": "datetime", + "_timestampwtz": "datetime", + "_timestamp": "datetime", + "_boolean": "boolean", + "_bool": "boolean", + "_json": "JSON", + "_jsonb": "json" +} \ No newline at end of file diff --git a/sqlconnect/internal/postgres/testdata/legacy-column-mapping-test-columns-table.json b/sqlconnect/internal/postgres/testdata/legacy-column-mapping-test-columns-table.json new file mode 100644 index 0000000..7a8e21e --- /dev/null +++ b/sqlconnect/internal/postgres/testdata/legacy-column-mapping-test-columns-table.json @@ -0,0 +1,30 @@ +{ + "_order": "int", + "_int": "int", + "_int2": "int", + "_int4": "int", + "_int8": "int", + "_integer": "int", + "_smallint": "int", + "_bigint": "int", + "_real": "float", + "_float": "float", + "_float4": "float", + "_float8": "float", + "_numeric": "float", + "_double": "float", + "_text": "string", + "_varchar": "string", + "_charvar": "string", + "_nchar": "string", + "_bpchar": "string", + "_character": "string", + "_timestamptz": "datetime", + "_timestampntz": "datetime", + "_timestampwtz": "datetime", + "_timestamp": "datetime", + "_boolean": "boolean", + "_bool": "boolean", + "_json": "json", + "_jsonb": "json" +} \ No newline at end of file diff --git a/sqlconnect/internal/postgres/testdata/legacy-column-mapping-test-rows.json b/sqlconnect/internal/postgres/testdata/legacy-column-mapping-test-rows.json new file mode 100644 index 0000000..c90520e --- /dev/null +++ b/sqlconnect/internal/postgres/testdata/legacy-column-mapping-test-rows.json @@ -0,0 +1,96 @@ +[ + { + "_order": 1, + "_int": 1, + "_int2": 1, + "_int4": 1, + "_int8": 
1, + "_integer": 1, + "_smallint": 1, + "_bigint": 1, + "_real": 1.1, + "_float": 1.1, + "_float4": 1.1, + "_float8": 1.1, + "_numeric": "1.10", + "_double": 1.1, + "_text": "abc", + "_varchar": "abc", + "_charvar": "abc", + "_nchar": "abc ", + "_bpchar": "abc", + "_character": "abc ", + "_timestamptz": "2004-10-19T08:23:54Z", + "_timestampntz": "2004-10-19T10:23:54Z", + "_timestampwtz": "2004-10-19T08:23:54Z", + "_timestamp": "2004-10-19T10:23:54Z", + "_boolean": true, + "_bool": true, + "_json": { + "a": 1 + }, + "_jsonb": { + "a": 1 + } + }, + { + "_order": 2, + "_int": 0, + "_int2": 0, + "_int4": 0, + "_int8": 0, + "_integer": 0, + "_smallint": 0, + "_bigint": 0, + "_real": 0, + "_float": 0, + "_float4": 0, + "_float8": 0, + "_numeric": "0.00", + "_double": 0, + "_text": "", + "_varchar": "", + "_charvar": "", + "_nchar": " ", + "_bpchar": "", + "_character": " ", + "_timestamptz": "2004-10-19T08:23:54Z", + "_timestampntz": "2004-10-19T10:23:54Z", + "_timestampwtz": "2004-10-19T08:23:54Z", + "_timestamp": "2004-10-19T10:23:54Z", + "_boolean": false, + "_bool": false, + "_json": {}, + "_jsonb": {} + }, + { + "_order": 3, + "_int": null, + "_int2": null, + "_int4": null, + "_int8": null, + "_integer": null, + "_smallint": null, + "_bigint": null, + "_real": null, + "_float": null, + "_float4": null, + "_float8": null, + "_numeric": null, + "_double": null, + "_text": null, + "_varchar": null, + "_charvar": null, + "_nchar": null, + "_bpchar": null, + "_character": null, + "_timestamptz": null, + "_timestampntz": null, + "_timestampwtz": null, + "_timestamp": null, + "_boolean": null, + "_bool": null, + "_json": null, + "_jsonb": null + } +] \ No newline at end of file diff --git a/sqlconnect/internal/redshift/db.go b/sqlconnect/internal/redshift/db.go new file mode 100644 index 0000000..cc50f9a --- /dev/null +++ b/sqlconnect/internal/redshift/db.go @@ -0,0 +1,74 @@ +package redshift + +import ( + "database/sql" + "encoding/json" + "fmt" + + _ "github.com/lib/pq" 
// postgres driver + "github.com/samber/lo" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect" + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/base" + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/postgres" +) + +const ( + DatabaseType = "redshift" + defaultRudderSchema = "_rudderstack" +) + +func NewDB(credentialsJSON json.RawMessage) (*DB, error) { + var config postgres.Config + err := config.Parse(credentialsJSON) + if err != nil { + return nil, err + } + + db, err := sql.Open(postgres.DatabaseType, config.ConnectionString()) + if err != nil { + return nil, err + } + + return &DB{ + DB: base.NewDB( + db, + lo.Ternary(config.RudderSchema != "", config.RudderSchema, defaultRudderSchema), + base.WithColumnTypeMappings(getColumnTypeMappings(config)), + base.WithJsonRowMapper(getJonRowMapper(config)), + base.WithSQLCommandsOverride(func(cmds base.SQLCommands) base.SQLCommands { + cmds.ListSchemas = func() (string, string) { + return "SELECT schema_name FROM svv_redshift_schemas", "schema_name" + } + cmds.SchemaExists = func(schema string) string { + return fmt.Sprintf("SELECT schema_name FROM svv_redshift_schemas WHERE schema_name = '%[1]s'", schema) + } + return cmds + }), + ), + }, nil +} + +func init() { + sqlconnect.RegisterDBFactory(DatabaseType, func(credentialsJSON json.RawMessage) (sqlconnect.DB, error) { + return NewDB(credentialsJSON) + }) +} + +type DB struct { + *base.DB +} + +func getColumnTypeMappings(config postgres.Config) map[string]string { + if config.UseLegacyMappings { + return legacyColumnTypeMappings + } + return columnTypeMappings +} + +func getJonRowMapper(config postgres.Config) func(databaseTypeName string, value any) any { + if config.UseLegacyMappings { + return legacyJsonRowMapper + } + return jsonRowMapper +} diff --git a/sqlconnect/internal/redshift/integration_test.go b/sqlconnect/internal/redshift/integration_test.go new file mode 100644 index 0000000..251a06d --- /dev/null +++ 
b/sqlconnect/internal/redshift/integration_test.go @@ -0,0 +1,19 @@ +package redshift_test + +import ( + "os" + "strings" + "testing" + + integrationtest "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/integration_test" + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/redshift" +) + +func TestRedshiftDB(t *testing.T) { + configJSON, ok := os.LookupEnv("REDSHIFT_TEST_ENVIRONMENT_CREDENTIALS") + if !ok { + t.Skip("skipping redshift integration test due to lack of a test environment") + } + + integrationtest.TestDatabaseScenarios(t, redshift.DatabaseType, []byte(configJSON), strings.ToLower, integrationtest.Options{LegacySupport: true}) +} diff --git a/sqlconnect/internal/redshift/legacy_mappings.go b/sqlconnect/internal/redshift/legacy_mappings.go new file mode 100644 index 0000000..722f153 --- /dev/null +++ b/sqlconnect/internal/redshift/legacy_mappings.go @@ -0,0 +1,54 @@ +package redshift + +import "encoding/json" + +var legacyColumnTypeMappings = map[string]string{ + "int": "int", + "int2": "int", + "int4": "int", + "int8": "int", + "integer": "int", + "smallint": "int", + "bigint": "int", + "real": "float", + "float": "float", + "float4": "float", + "float8": "float", + "numeric": "float", + "double precision": "float", + "boolean": "boolean", + "bool": "boolean", + "text": "string", + "varchar": "string", + "character varying": "string", + "nchar": "string", + "bpchar": "string", + "character": "string", + "nvarchar": "string", + "string": "string", + "date": "datetime", + "timestamp without time zone": "datetime", + "timestamp with time zone": "datetime", +} + +// legacyJsonRowMapper maps a row's scanned column to a json object's field +func legacyJsonRowMapper(databaseTypeName string, value any) any { + switch databaseTypeName { + case "JSON": + fallthrough + case "JSONB": + switch v := value.(type) { + case []byte: + return json.RawMessage(v) + + case string: + return json.RawMessage(v) + } + default: + switch v := value.(type) { + case 
[]byte: + return string(v) + } + } + return value +} diff --git a/sqlconnect/internal/redshift/mappings.go b/sqlconnect/internal/redshift/mappings.go new file mode 100644 index 0000000..fbf6bdb --- /dev/null +++ b/sqlconnect/internal/redshift/mappings.go @@ -0,0 +1,56 @@ +package redshift + +import ( + "strconv" +) + +var columnTypeMappings = map[string]string{ + "int": "int", + "int2": "int", + "int4": "int", + "int8": "int", + "integer": "int", + "smallint": "int", + "bigint": "int", + "real": "float", + "float": "float", + "float4": "float", + "float8": "float", + "numeric": "float", + "double precision": "float", + "boolean": "boolean", + "bool": "boolean", + "text": "string", + "varchar": "string", + "character varying": "string", + "nchar": "string", + "bpchar": "string", + "character": "string", + "nvarchar": "string", + "string": "string", + "date": "datetime", + "timestamptz": "datetime", + "timestamp without time zone": "datetime", + "timestamp with time zone": "datetime", + "timestamp": "datetime", +} + +// jsonRowMapper maps a row's scanned column to a json object's field +func jsonRowMapper(databaseTypeName string, value any) any { + switch databaseTypeName { + case "NUMERIC": + switch v := value.(type) { + case []byte: + if n, err := strconv.ParseFloat(string(v), 64); err == nil { + return n + } + } + default: + switch v := value.(type) { + case []byte: + return string(v) + } + } + + return value +} diff --git a/sqlconnect/internal/redshift/testdata/column-mapping-test-columns.json b/sqlconnect/internal/redshift/testdata/column-mapping-test-columns.json new file mode 100644 index 0000000..61c51af --- /dev/null +++ b/sqlconnect/internal/redshift/testdata/column-mapping-test-columns.json @@ -0,0 +1,28 @@ +{ + "_order": "int", + "_int": "int", + "_int2": "int", + "_int4": "int", + "_int8": "int", + "_integer": "int", + "_smallint": "int", + "_bigint": "int", + "_real": "float", + "_float": "float", + "_float4": "float", + "_float8": "float", + 
"_numeric": "float", + "_double": "float", + "_text": "string", + "_varchar": "string", + "_charvar": "string", + "_nchar": "string", + "_bpchar": "string", + "_character": "string", + "_timestamptz": "datetime", + "_timestampntz": "datetime", + "_timestampwtz": "datetime", + "_timestamp": "datetime", + "_boolean": "boolean", + "_bool": "boolean" +} \ No newline at end of file diff --git a/sqlconnect/internal/redshift/testdata/column-mapping-test-rows.json b/sqlconnect/internal/redshift/testdata/column-mapping-test-rows.json new file mode 100644 index 0000000..e4eaa73 --- /dev/null +++ b/sqlconnect/internal/redshift/testdata/column-mapping-test-rows.json @@ -0,0 +1,86 @@ +[ + { + "_order": 1, + "_int": 1, + "_int2": 1, + "_int4": 1, + "_int8": 1, + "_integer": 1, + "_smallint": 1, + "_bigint": 1, + "_real": 1.1, + "_float": 1.1, + "_float4": 1.1, + "_float8": 1.1, + "_numeric": 1.1, + "_double": 1.1, + "_text": "abc", + "_varchar": "abc", + "_charvar": "abc", + "_nchar": "abc ", + "_bpchar": "abc ", + "_character": "abc ", + "_timestamptz": "2004-10-19T08:23:54Z", + "_timestampntz": "2004-10-19T10:23:54Z", + "_timestampwtz": "2004-10-19T08:23:54Z", + "_timestamp": "2004-10-19T10:23:54Z", + "_boolean": true, + "_bool": true + }, + { + "_order": 2, + "_int": 0, + "_int2": 0, + "_int4": 0, + "_int8": 0, + "_integer": 0, + "_smallint": 0, + "_bigint": 0, + "_real": 0, + "_float": 0, + "_float4": 0, + "_float8": 0, + "_numeric": 0, + "_double": 0, + "_text": "", + "_varchar": "", + "_charvar": "", + "_nchar": " ", + "_bpchar": " ", + "_character": " ", + "_timestamptz": "2004-10-19T08:23:54Z", + "_timestampntz": "2004-10-19T10:23:54Z", + "_timestampwtz": "2004-10-19T08:23:54Z", + "_timestamp": "2004-10-19T10:23:54Z", + "_boolean": false, + "_bool": false + }, + { + "_order": 3, + "_int": null, + "_int2": null, + "_int4": null, + "_int8": null, + "_integer": null, + "_smallint": null, + "_bigint": null, + "_real": null, + "_float": null, + "_float4": null, + "_float8": 
null, + "_numeric": null, + "_double": null, + "_text": null, + "_varchar": null, + "_charvar": null, + "_nchar": null, + "_bpchar": null, + "_character": null, + "_timestamptz": null, + "_timestampntz": null, + "_timestampwtz": null, + "_timestamp": null, + "_boolean": null, + "_bool": null + } +] \ No newline at end of file diff --git a/sqlconnect/internal/redshift/testdata/column-mapping-test-seed.sql b/sqlconnect/internal/redshift/testdata/column-mapping-test-seed.sql new file mode 100644 index 0000000..57c29aa --- /dev/null +++ b/sqlconnect/internal/redshift/testdata/column-mapping-test-seed.sql @@ -0,0 +1,35 @@ +CREATE TABLE "{{.schema}}"."column_mappings_test" ( + _order INT, + _int INT, + _int2 INT2, + _int4 INT4, + _int8 INT8, + _integer INTEGER, + _smallint SMALLINT, + _bigint BIGINT, + _real REAL, + _float FLOAT, + _float4 FLOAT4, + _float8 FLOAT8, + _numeric NUMERIC(10,2), + _double DOUBLE PRECISION, + _text TEXT, + _varchar VARCHAR(10), + _charvar CHARACTER VARYING, + _nchar NCHAR(10), + _bpchar BPCHAR, + _character CHARACTER(10), + _timestamptz TIMESTAMPTZ, + _timestampntz TIMESTAMP WITHOUT TIME ZONE, + _timestampwtz TIMESTAMP WITH TIME ZONE, + _timestamp TIMESTAMP, + _boolean BOOLEAN, + _bool BOOL +); + +INSERT INTO "{{.schema}}"."column_mappings_test" + (_order, _int, _int2, _int4, _int8, _integer, _smallint, _bigint, _real, _float, _float4, _float8, _numeric, _double, _text, _varchar, _charvar, _nchar, _bpchar, _character, _timestamptz, _timestampntz, _timestampwtz, _timestamp, _boolean, _bool) +VALUES + (1, 1, 1, 1, 1, 1, 1, 1, 1.1, 1.1, 1.1, 1.1, 1.1, 1.1, 'abc', 'abc', 'abc', 'abc', 'abc', 'abc', '2004-10-19 10:23:54+02', '2004-10-19 10:23:54', '2004-10-19 10:23:54+02', '2004-10-19 10:23:54+02', true, true ), + (2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, '', '', '', '', '', '', '2004-10-19 10:23:54+02', '2004-10-19 10:23:54', '2004-10-19 10:23:54+02', '2004-10-19 10:23:54+02', false, false), + (3, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 
NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ); \ No newline at end of file diff --git a/sqlconnect/internal/redshift/testdata/legacy-column-mapping-test-columns-sql.json b/sqlconnect/internal/redshift/testdata/legacy-column-mapping-test-columns-sql.json new file mode 100644 index 0000000..bf83f39 --- /dev/null +++ b/sqlconnect/internal/redshift/testdata/legacy-column-mapping-test-columns-sql.json @@ -0,0 +1,28 @@ +{ + "_order": "int", + "_int": "int", + "_int2": "int", + "_int4": "int", + "_int8": "int", + "_integer": "int", + "_smallint": "int", + "_bigint": "int", + "_real": "float", + "_float": "float", + "_float4": "float", + "_float8": "float", + "_numeric": "float", + "_double": "float", + "_text": "string", + "_varchar": "string", + "_charvar": "string", + "_nchar": "string", + "_bpchar": "string", + "_character": "string", + "_timestamptz": "TIMESTAMPTZ", + "_timestampntz": "TIMESTAMP", + "_timestampwtz": "TIMESTAMPTZ", + "_timestamp": "TIMESTAMP", + "_boolean": "boolean", + "_bool": "boolean" +} \ No newline at end of file diff --git a/sqlconnect/internal/redshift/testdata/legacy-column-mapping-test-columns-table.json b/sqlconnect/internal/redshift/testdata/legacy-column-mapping-test-columns-table.json new file mode 100644 index 0000000..61c51af --- /dev/null +++ b/sqlconnect/internal/redshift/testdata/legacy-column-mapping-test-columns-table.json @@ -0,0 +1,28 @@ +{ + "_order": "int", + "_int": "int", + "_int2": "int", + "_int4": "int", + "_int8": "int", + "_integer": "int", + "_smallint": "int", + "_bigint": "int", + "_real": "float", + "_float": "float", + "_float4": "float", + "_float8": "float", + "_numeric": "float", + "_double": "float", + "_text": "string", + "_varchar": "string", + "_charvar": "string", + "_nchar": "string", + "_bpchar": "string", + "_character": "string", + "_timestamptz": "datetime", + "_timestampntz": "datetime", + "_timestampwtz": "datetime", + "_timestamp": "datetime", 
+ "_boolean": "boolean", + "_bool": "boolean" +} \ No newline at end of file diff --git a/sqlconnect/internal/redshift/testdata/legacy-column-mapping-test-rows.json b/sqlconnect/internal/redshift/testdata/legacy-column-mapping-test-rows.json new file mode 100644 index 0000000..b1d1428 --- /dev/null +++ b/sqlconnect/internal/redshift/testdata/legacy-column-mapping-test-rows.json @@ -0,0 +1,86 @@ +[ + { + "_order": 1, + "_int": 1, + "_int2": 1, + "_int4": 1, + "_int8": 1, + "_integer": 1, + "_smallint": 1, + "_bigint": 1, + "_real": 1.1, + "_float": 1.1, + "_float4": 1.1, + "_float8": 1.1, + "_numeric": "1.10", + "_double": 1.1, + "_text": "abc", + "_varchar": "abc", + "_charvar": "abc", + "_nchar": "abc ", + "_bpchar": "abc ", + "_character": "abc ", + "_timestamptz": "2004-10-19T08:23:54Z", + "_timestampntz": "2004-10-19T10:23:54Z", + "_timestampwtz": "2004-10-19T08:23:54Z", + "_timestamp": "2004-10-19T10:23:54Z", + "_boolean": true, + "_bool": true + }, + { + "_order": 2, + "_int": 0, + "_int2": 0, + "_int4": 0, + "_int8": 0, + "_integer": 0, + "_smallint": 0, + "_bigint": 0, + "_real": 0, + "_float": 0, + "_float4": 0, + "_float8": 0, + "_numeric": "0.00", + "_double": 0, + "_text": "", + "_varchar": "", + "_charvar": "", + "_nchar": " ", + "_bpchar": " ", + "_character": " ", + "_timestamptz": "2004-10-19T08:23:54Z", + "_timestampntz": "2004-10-19T10:23:54Z", + "_timestampwtz": "2004-10-19T08:23:54Z", + "_timestamp": "2004-10-19T10:23:54Z", + "_boolean": false, + "_bool": false + }, + { + "_order": 3, + "_int": null, + "_int2": null, + "_int4": null, + "_int8": null, + "_integer": null, + "_smallint": null, + "_bigint": null, + "_real": null, + "_float": null, + "_float4": null, + "_float8": null, + "_numeric": null, + "_double": null, + "_text": null, + "_varchar": null, + "_charvar": null, + "_nchar": null, + "_bpchar": null, + "_character": null, + "_timestamptz": null, + "_timestampntz": null, + "_timestampwtz": null, + "_timestamp": null, + "_boolean": 
null, + "_bool": null + } +] \ No newline at end of file diff --git a/sqlconnect/internal/snowflake/config.go b/sqlconnect/internal/snowflake/config.go new file mode 100644 index 0000000..0495985 --- /dev/null +++ b/sqlconnect/internal/snowflake/config.go @@ -0,0 +1,54 @@ +package snowflake + +import ( + "encoding/json" + "fmt" + + "github.com/snowflakedb/gosnowflake" +) + +type Config struct { + Account string `json:"account"` + Warehouse string `json:"warehouse"` + DBName string `json:"dbname"` + User string `json:"user"` + Password string `json:"password"` + Schema string `json:"schema"` + Role string `json:"role"` + + // RudderSchema is used to override the default rudder schema name during tests + RudderSchema string `json:"rudderSchema"` + KeepSessionAlive bool `json:"keepSessionAlive"` + UseLegacyMappings bool `json:"useLegacyMappings"` +} + +func (c Config) ConnectionString() (dsn string, err error) { + sc := gosnowflake.Config{ + User: c.User, + Password: c.Password, + Account: c.Account, + Database: c.DBName, + Warehouse: c.Warehouse, + Schema: c.Schema, + // since omitempty is not used, default value of role would be "" (empty string). 
+ // this will ensure backwards compatibility, check line 137 on dsn.go (if cfg.Role != "" {params.Add("role", cfg.Role)}) + Role: c.Role, + } + + if c.KeepSessionAlive { + params := make(map[string]*string) + valueTrue := "true" + params["client_session_keep_alive"] = &valueTrue + sc.Params = params + } + + dsn, err = gosnowflake.DSN(&sc) + if err != nil { + err = fmt.Errorf("creating dsn: %v", err) + } + return +} + +func (c *Config) Parse(configJSON json.RawMessage) error { + return json.Unmarshal(configJSON, c) +} diff --git a/sqlconnect/internal/snowflake/db.go b/sqlconnect/internal/snowflake/db.go new file mode 100644 index 0000000..953e298 --- /dev/null +++ b/sqlconnect/internal/snowflake/db.go @@ -0,0 +1,96 @@ +package snowflake + +import ( + "database/sql" + "encoding/json" + "fmt" + + "github.com/samber/lo" + _ "github.com/snowflakedb/gosnowflake" // snowflake driver + + "github.com/rudderlabs/sqlconnect-go/sqlconnect" + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/base" +) + +const ( + DatabaseType = "snowflake" + defaultRudderSchema = "_RUDDERSTACK" +) + +// NewDB creates a new postgres-specific client +func NewDB(configJSON json.RawMessage) (*DB, error) { + var config Config + err := config.Parse(configJSON) + if err != nil { + return nil, err + } + + connectionString, err := config.ConnectionString() + if err != nil { + return nil, err + } + db, err := sql.Open(DatabaseType, connectionString) + if err != nil { + return nil, err + } + + return &DB{ + DB: base.NewDB( + db, + lo.Ternary(config.RudderSchema != "", config.RudderSchema, defaultRudderSchema), + base.WithDialect(dialect{}), + base.WithColumnTypeMapper(getColumnTypeMapper(config)), + base.WithJsonRowMapper(getJonRowMapper(config)), + base.WithSQLCommandsOverride(func(cmds base.SQLCommands) base.SQLCommands { + cmds.ListSchemas = func() (string, string) { return "SHOW TERSE SCHEMAS", "name" } + cmds.SchemaExists = func(schema string) string { + return fmt.Sprintf("SHOW TERSE SCHEMAS 
LIKE '%[1]s'", schema) + } + cmds.ListTables = func(schema string) []lo.Tuple2[string, string] { + return []lo.Tuple2[string, string]{ + {A: fmt.Sprintf(`SHOW TERSE TABLES IN SCHEMA "%[1]s"`, schema), B: "name"}, + } + } + cmds.ListTablesWithPrefix = func(schema, prefix string) []lo.Tuple2[string, string] { + return []lo.Tuple2[string, string]{ + {A: fmt.Sprintf(`SHOW TERSE TABLES LIKE '%[2]s' IN SCHEMA "%[1]s"`, schema, prefix+"%"), B: "name"}, + } + } + cmds.TableExists = func(schema, table string) string { + return fmt.Sprintf("SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = '%[1]s' AND TABLE_NAME = '%[2]s'", schema, table) + } + cmds.ListColumns = func(schema, table string) (string, string, string) { + return fmt.Sprintf(`DESCRIBE TABLE "%[1]s"."%[2]s"`, schema, table), "name", "type" + } + cmds.RenameTable = func(schema, oldName, newName string) string { + return fmt.Sprintf(`ALTER TABLE %[1]s.%[2]s RENAME TO %[1]s.%[3]s`, schema, oldName, newName) + } + return cmds + }), + ), + }, nil +} + +func init() { + sqlconnect.RegisterDBFactory(DatabaseType, func(credentialsJSON json.RawMessage) (sqlconnect.DB, error) { + return NewDB(credentialsJSON) + }) +} + +type DB struct { + *base.DB +} + +func getColumnTypeMapper(config Config) func(base.ColumnType) string { + if config.UseLegacyMappings { + return legacyColumnTypeMapper + } + return columnTypeMapper +} + +func getJonRowMapper(config Config) func(databaseTypeName string, value any) any { + if config.UseLegacyMappings { + return legacyJsonRowMapper + } + return jsonRowMapper +} diff --git a/sqlconnect/internal/snowflake/dialect.go b/sqlconnect/internal/snowflake/dialect.go new file mode 100644 index 0000000..867056c --- /dev/null +++ b/sqlconnect/internal/snowflake/dialect.go @@ -0,0 +1,27 @@ +package snowflake + +import ( + "strings" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect" +) + +type dialect struct{} + +// QuoteTable quotes a table name +func (d dialect) QuoteTable(table 
sqlconnect.RelationRef) string { + if table.Schema != "" { + return d.QuoteIdentifier(table.Schema) + "." + d.QuoteIdentifier(table.Name) + } + return d.QuoteIdentifier(table.Name) +} + +// QuoteIdentifier quotes an identifier, e.g. a column name +func (d dialect) QuoteIdentifier(name string) string { + return `"` + name + `"` +} + +// FormatTableName formats a table name, typically by lower or upper casing it, depending on the database +func (d dialect) FormatTableName(name string) string { + return strings.ToUpper(name) +} diff --git a/sqlconnect/internal/snowflake/dialect_test.go b/sqlconnect/internal/snowflake/dialect_test.go new file mode 100644 index 0000000..ec66800 --- /dev/null +++ b/sqlconnect/internal/snowflake/dialect_test.go @@ -0,0 +1,30 @@ +package snowflake + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect" +) + +func TestDialect(t *testing.T) { + var d dialect + t.Run("format table", func(t *testing.T) { + formatted := d.FormatTableName("TaBle") + require.Equal(t, "TABLE", formatted, "table name should be uppercased") + }) + + t.Run("quote identifier", func(t *testing.T) { + quoted := d.QuoteIdentifier("column") + require.Equal(t, `"column"`, quoted, "column name should be quoted with double quotes") + }) + + t.Run("quote table", func(t *testing.T) { + quoted := d.QuoteTable(sqlconnect.NewRelationRef("table")) + require.Equal(t, `"table"`, quoted, "table name should be quoted with double quotes") + + quoted = d.QuoteTable(sqlconnect.NewRelationRef("table", sqlconnect.WithSchema("schema"))) + require.Equal(t, `"schema"."table"`, quoted, "schema and table name should be quoted with double quotes") + }) +} diff --git a/sqlconnect/internal/snowflake/integration_test.go b/sqlconnect/internal/snowflake/integration_test.go new file mode 100644 index 0000000..4d59d63 --- /dev/null +++ b/sqlconnect/internal/snowflake/integration_test.go @@ -0,0 +1,19 @@ +package snowflake_test + +import ( + 
"os" + "strings" + "testing" + + integrationtest "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/integration_test" + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/snowflake" +) + +func TestSnowflakeDB(t *testing.T) { + configJSON, ok := os.LookupEnv("SNOWFLAKE_TEST_ENVIRONMENT_CREDENTIALS") + if !ok { + t.Skip("skipping snowflake integration test due to lack of a test environment") + } + + integrationtest.TestDatabaseScenarios(t, snowflake.DatabaseType, []byte(configJSON), strings.ToUpper, integrationtest.Options{LegacySupport: true}) +} diff --git a/sqlconnect/internal/snowflake/legacy_mappings.go b/sqlconnect/internal/snowflake/legacy_mappings.go new file mode 100644 index 0000000..2111141 --- /dev/null +++ b/sqlconnect/internal/snowflake/legacy_mappings.go @@ -0,0 +1,120 @@ +package snowflake + +import ( + "encoding/json" + "fmt" + "strconv" + "strings" + "time" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/base" +) + +func legacyColumnTypeMapper(columnType base.ColumnType) string { + columnType.DatabaseTypeName() + columnTypeMappings := map[string]string{ + "NUMBER": "int", + "DECIMAL": "int", + "NUMERIC": "int", + "INT": "int", + "INTEGER": "int", + "BIGINT": "int", + "SMALLINT": "int", + "TINYINT": "int", + "FIXED": "float", + "FLOAT": "float", + "FLOAT4": "float", + "FLOAT8": "float", + "DOUBLE": "float", + "REAL": "float", + "DOUBLE PRECISION": "float", + "BOOLEAN": "boolean", + "TEXT": "string", + "VARCHAR": "string", + "CHAR": "string", + "CHARACTER": "string", + "STRING": "string", + "BINARY": "string", + "VARBINARY": "string", + "TIMESTAMP_NTZ": "datetime", + "DATE": "datetime", + "DATETIME": "datetime", + "TIME": "datetime", + "TIMESTAMP": "datetime", + "TIMESTAMP_LTZ": "datetime", + "TIMESTAMP_TZ": "datetime", + "VARIANT": "json", + } + databaseTypeName := strings.ToUpper(re.ReplaceAllString(columnType.DatabaseTypeName(), "")) + if mappedType, ok := columnTypeMappings[strings.ToUpper(databaseTypeName)]; ok { + return 
mappedType + } + return databaseTypeName +} + +// legacyJsonRowMapper maps a row's scanned column to a json object's field +func legacyJsonRowMapper(databaseTypeName string, value any) any { + if value == nil { + return nil + } + + switch databaseTypeName { + // in case of NOT string, the function returns the value itself + case "BOOLEAN": + if s, ok := value.(string); ok { + return s == "1" + } + + case "FIXED": + switch v := value.(type) { + case float64: + return v + case string: + n, err := strconv.ParseInt(value.(string), 10, 64) + if err != nil { + n, err := strconv.ParseFloat(value.(string), 64) + if err != nil { + panic(err) + } + return n + } + return n + default: + panic(fmt.Errorf("unsupported type for FIXED:%t", v)) + } + + case "OBJECT": + return json.RawMessage(value.(string)) + + case "ARRAY": + rawValue := value.(string) + if strings.HasPrefix(rawValue, "[") { // An ARRAY can contain undefined values in place of nulls which would cause json.Unmarshal to fail + var jsonValue any + if err := json.Unmarshal([]byte(rawValue), &jsonValue); err != nil { + sanitizedJSON := strings.ReplaceAll(rawValue, "undefined", "null") + return json.RawMessage(sanitizedJSON) + } + } + return json.RawMessage(rawValue) + + case "VARIANT": + return value.(string) + + case "DATE": + return value.(time.Time) + + case "TIME": + return value.(time.Time) + + case "TIMESTAMP_LTZ": + return value.(time.Time) + + case "TIMESTAMP_NTZ": + return value.(time.Time) + + case "TIMESTAMP_TZ": + return value.(time.Time) + } + + return value +} diff --git a/sqlconnect/internal/snowflake/mappings.go b/sqlconnect/internal/snowflake/mappings.go new file mode 100644 index 0000000..07d42f5 --- /dev/null +++ b/sqlconnect/internal/snowflake/mappings.go @@ -0,0 +1,127 @@ +package snowflake + +import ( + "encoding/json" + "fmt" + "regexp" + "strconv" + "strings" + "time" + + "github.com/dlclark/regexp2" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/base" +) + +var undefinedInArray = 
regexp2.MustCompile(`([\[,]\n[ ]*)undefined(?=,\n[ ]*|\n\])`, regexp2.None) + +// mapping of database column types to rudder types +var columnTypeMappings = map[string]string{ + "INT": "int", + "DECIMAL": "float", + "NUMERIC": "float", + "INTEGER": "int", + "BIGINT": "int", + "SMALLINT": "int", + "TINYINT": "int", + "FLOAT": "float", + "FLOAT4": "float", + "FLOAT8": "float", + "DOUBLE": "float", + "REAL": "float", + "DOUBLE PRECISION": "float", + "BOOLEAN": "boolean", + "TEXT": "string", + "VARCHAR": "string", + "CHAR": "string", + "CHARACTER": "string", + "STRING": "string", + "BINARY": "string", + "VARBINARY": "string", + "TIMESTAMP_NTZ": "datetime", + "DATE": "datetime", + "DATETIME": "datetime", + "TIME": "datetime", + "TIMESTAMP": "datetime", + "TIMESTAMP_LTZ": "datetime", + "TIMESTAMP_TZ": "datetime", + "VARIANT": "json", + "OBJECT": "json", + "ARRAY": "json", +} + +var ( + re = regexp.MustCompile(`(\(.+\)|<.+>)`) // remove type parameters [<>] and size constraints [()] + numberPrecision = regexp.MustCompile(`NUMBER\((?P\d+),(?P\d+)\)`) +) + +func columnTypeMapper(columnType base.ColumnType) string { + databaseTypeName := strings.ToUpper(re.ReplaceAllString(columnType.DatabaseTypeName(), "")) + if mappedType, ok := columnTypeMappings[strings.ToUpper(databaseTypeName)]; ok { + return mappedType + } + + if databaseTypeName == "NUMBER" { // [DESCRIBE TABLE] returns [NUMBER(precision,scale)] for various numeric types, including [INT] types + if matches := numberPrecision.FindStringSubmatch(columnType.DatabaseTypeName()); len(matches) > 0 { + precisionIndex := numberPrecision.SubexpIndex("precision") + if precision, err := strconv.ParseInt(matches[precisionIndex+1], 10, 64); err == nil && precision == 0 { + return "int" + } + } + return "float" + } + if databaseTypeName == "FIXED" { // When finding column types of a query, for most numeric types the driver returns [FIXED] + if precision, decimals, ok := columnType.DecimalSize(); ok && precision > 0 && decimals > 0 
{ + return "float" + } + return "int" + } + return databaseTypeName +} + +// check https://godoc.org/github.com/snowflakedb/gosnowflake#hdr-Supported_Data_Types for handling snowflake data types +func jsonRowMapper(databaseTypeName string, value interface{}) interface{} { + if value == nil { + return nil + } + switch databaseTypeName { + case "BOOLEAN": + if s, ok := value.(string); ok { + return s == "1" + } + case "FIXED": + switch v := value.(type) { + case float64: + return v + case string: + n, err := strconv.ParseInt(value.(string), 10, 64) + if err != nil { + n, err := strconv.ParseFloat(value.(string), 64) + if err != nil { + panic(err) + } + return n + } + return n + default: + panic(fmt.Errorf("unsupported type for FIXED:%t", v)) + } + case "OBJECT": + return json.RawMessage(value.(string)) + case "VARIANT", "ARRAY": + rawValue := value.(string) + // An ARRAY can contain undefined values in place of nulls which would cause json.Unmarshal to fail + if strings.HasPrefix(rawValue, "[") && !json.Valid([]byte(rawValue)) { + if r, err := undefinedInArray.Replace(rawValue, "${1}null", 0, -1); err == nil { + return json.RawMessage(r) + } + } + return json.RawMessage(rawValue) + case "DATE", "TIME", "TIMESTAMP", "TIMESTAMP_LTZ", "TIMESTAMP_NTZ", "TIMESTAMP_TZ": + return value.(time.Time) + case "BINARY", "VARBINARY": + return string(value.([]byte)) + } + + return value +} diff --git a/sqlconnect/internal/snowflake/mappings_test.go b/sqlconnect/internal/snowflake/mappings_test.go new file mode 100644 index 0000000..80d4daa --- /dev/null +++ b/sqlconnect/internal/snowflake/mappings_test.go @@ -0,0 +1,29 @@ +package snowflake + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestUndefinedInArray(t *testing.T) { + r, err := undefinedInArray.Replace("[\n 1,\n 2,\n 3,\n undefined\n]", "${1}null", 0, -1) + require.NoError(t, err) + require.Equal(t, "[\n 1,\n 2,\n 3,\n null\n]", r) + + r, err = undefinedInArray.Replace("[\n undefined,\n 1,\n 
2,\n 3\n]", "${1}null", 0, -1) + require.NoError(t, err) + require.Equal(t, "[\n null,\n 1,\n 2,\n 3\n]", r) + + r, err = undefinedInArray.Replace("[\n 1,\n undefined,\n 2,\n 3\n]", "${1}null", 0, -1) + require.NoError(t, err) + require.Equal(t, "[\n 1,\n null,\n 2,\n 3\n]", r) + + r, err = undefinedInArray.Replace("[\n undefined,\n undefined,\n undefined\n]", "${1}null", 0, -1) + require.NoError(t, err) + require.Equal(t, "[\n null,\n null,\n null\n]", r) + + r, err = undefinedInArray.Replace("[\n \"undefined string\",\n 2,\n 3,\n undefined\n]", "${1}null", 0, -1) + require.NoError(t, err) + require.Equal(t, "[\n \"undefined string\",\n 2,\n 3,\n null\n]", r) +} diff --git a/sqlconnect/internal/snowflake/testdata/column-mapping-test-columns.json b/sqlconnect/internal/snowflake/testdata/column-mapping-test-columns.json new file mode 100644 index 0000000..62441bc --- /dev/null +++ b/sqlconnect/internal/snowflake/testdata/column-mapping-test-columns.json @@ -0,0 +1,35 @@ +{ + "_ORDER": "int", + "_INT": "int", + "_NUMBER": "float", + "_DECIMAL": "float", + "_NUMERIC": "float", + "_INTEGER": "int", + "_BIGINT": "int", + "_SMALLINT": "int", + "_TINYINT": "int", + "_FLOAT": "float", + "_FLOAT4": "float", + "_FLOAT8": "float", + "_DOUBLE": "float", + "_REAL": "float", + "_DOUBLE_PRECISION": "float", + "_BOOLEAN": "boolean", + "_TEXT": "string", + "_VARCHAR": "string", + "_CHAR": "string", + "_CHARACTER": "string", + "_STRING": "string", + "_BINARY": "string", + "_VARBINARY": "string", + "_DATE": "datetime", + "_DATETIME": "datetime", + "_TIME": "datetime", + "_TIMESTAMP": "datetime", + "_TIMESTAMPNTZ": "datetime", + "_TIMESTAMPLTZ": "datetime", + "_TIMESTAMPTZ": "datetime", + "_VARIANT": "json", + "_OBJECT": "json", + "_ARRAY": "json" +} \ No newline at end of file diff --git a/sqlconnect/internal/snowflake/testdata/column-mapping-test-rows.json b/sqlconnect/internal/snowflake/testdata/column-mapping-test-rows.json new file mode 100644 index 0000000..3574254 --- /dev/null 
+++ b/sqlconnect/internal/snowflake/testdata/column-mapping-test-rows.json @@ -0,0 +1,107 @@ +[ + { + "_ORDER": 1, + "_INT": 1, + "_NUMBER": 1.1, + "_DECIMAL": 1.1, + "_NUMERIC": 1.1, + "_INTEGER": 1, + "_BIGINT": 1, + "_SMALLINT": 1, + "_TINYINT": 1, + "_FLOAT": 1.1, + "_FLOAT4": 1.1, + "_FLOAT8": 1.1, + "_DOUBLE": 1.1, + "_REAL": 1.1, + "_DOUBLE_PRECISION": 1.1, + "_BOOLEAN": true, + "_TEXT": "t", + "_VARCHAR": "vc", + "_CHAR": "c", + "_CHARACTER": "c", + "_STRING": "s", + "_BINARY": "bin", + "_VARBINARY": "vbin", + "_DATE": "2021-07-01T00:00:00Z", + "_DATETIME": "2017-01-01T12:00:00Z", + "_TIME": "0001-01-01T12:00:00Z", + "_TIMESTAMP": "2014-01-01T16:00:00Z", + "_TIMESTAMPNTZ": "2014-01-01T16:00:00Z", + "_TIMESTAMPLTZ": "2014-01-01T16:00:00-08:00", + "_TIMESTAMPTZ": "2014-01-01T16:00:00-08:00", + "_VARIANT": {"key": "value", "key1": null}, + "_OBJECT": { "key": "value"}, + "_ARRAY": [1, 2, 3, null] + }, + { + "_ORDER": 2, + "_INT": 0, + "_NUMBER": 0, + "_DECIMAL": 0, + "_NUMERIC": 0, + "_INTEGER": 0, + "_BIGINT": 0, + "_SMALLINT": 0, + "_TINYINT": 0, + "_FLOAT": 0, + "_FLOAT4": 0, + "_FLOAT8": 0, + "_DOUBLE": 0, + "_REAL": 0, + "_DOUBLE_PRECISION": 0, + "_BOOLEAN": false, + "_TEXT": "", + "_VARCHAR": "", + "_CHAR": "", + "_CHARACTER": "", + "_STRING": "", + "_BINARY": "", + "_VARBINARY": "", + "_DATE": "2021-07-01T00:00:00Z", + "_DATETIME": "2017-01-01T12:00:00Z", + "_TIME": "0001-01-01T12:00:00Z", + "_TIMESTAMP": "2014-01-01T16:00:00Z", + "_TIMESTAMPNTZ": "2014-01-01T16:00:00Z", + "_TIMESTAMPLTZ": "2014-01-01T16:00:00-08:00", + "_TIMESTAMPTZ": "2014-01-01T16:00:00-08:00", + "_VARIANT": "string", + "_OBJECT": {}, + "_ARRAY": [] + }, + { + "_ORDER": 3, + "_INT": null, + "_NUMBER": null, + "_DECIMAL": null, + "_NUMERIC": null, + "_INTEGER": null, + "_BIGINT": null, + "_SMALLINT": null, + "_TINYINT": null, + "_FLOAT": null, + "_FLOAT4": null, + "_FLOAT8": null, + "_DOUBLE": null, + "_REAL": null, + "_DOUBLE_PRECISION": null, + "_BOOLEAN": null, + "_TEXT": null, + 
"_VARCHAR": null, + "_CHAR": null, + "_CHARACTER": null, + "_STRING": null, + "_BINARY": null, + "_VARBINARY": null, + "_DATE": null, + "_DATETIME": null, + "_TIME": null, + "_TIMESTAMP": null, + "_TIMESTAMPNTZ": null, + "_TIMESTAMPLTZ": null, + "_TIMESTAMPTZ": null, + "_VARIANT": null, + "_OBJECT": null, + "_ARRAY": null + } +] \ No newline at end of file diff --git a/sqlconnect/internal/snowflake/testdata/column-mapping-test-seed.sql b/sqlconnect/internal/snowflake/testdata/column-mapping-test-seed.sql new file mode 100644 index 0000000..8752d2d --- /dev/null +++ b/sqlconnect/internal/snowflake/testdata/column-mapping-test-seed.sql @@ -0,0 +1,51 @@ +CREATE TABLE "{{.schema}}"."COLUMN_MAPPINGS_TEST" ( + _order INT, + _int INT, + _number NUMBER(10,2), + _decimal DECIMAL(10,2), + _numeric NUMERIC(10,2), + _integer INTEGER, + _bigint BIGINT, + _smallint SMALLINT, + _tinyint TINYINT, + _float FLOAT, + _float4 FLOAT4, + _float8 FLOAT8, + _double DOUBLE, + _real REAL, + _double_precision DOUBLE PRECISION, + _boolean BOOLEAN, + _text TEXT, + _varchar VARCHAR, + _char CHAR, + _character CHARACTER, + _string STRING, + _binary BINARY, + _varbinary VARBINARY, + _date DATE, + _datetime DATETIME, + _time TIME, + _timestamp TIMESTAMP, + _timestampntz TIMESTAMP_NTZ, + _timestampltz TIMESTAMP_LTZ, + _timestamptz TIMESTAMP_TZ, + _variant VARIANT, + _object OBJECT, + _array ARRAY +); + +INSERT INTO "{{.schema}}"."COLUMN_MAPPINGS_TEST" + (_order, _int, _number, _decimal, _numeric, _integer, _bigint, _smallint, _tinyint, _float, _float4, _float8, _double, _real, _double_precision, _boolean, _text, _varchar, _char, _character, _string, _binary, _varbinary, _date, _datetime, _time, _timestamp, _timestampntz, _timestampltz, _timestamptz, _variant, _object, _array) +SELECT + 1, 1, 1.1, 1.1, 1.1, 1, 1, 1, 1, 1.1, 1.1, 1.1, 1.1, 1.1, 1.1, true, 't', 'vc', 'c', 'c', 's', TO_BINARY('bin', 'UTF-8'), TO_BINARY('vbin', 'UTF-8'), '2021-7-1', '2017-01-01 12:00:00', '12:00:00', '2014-01-01 
16:00:00', '2014-01-01 16:00:00', '2014-01-01 16:00:00', '2014-01-01 16:00:00', TO_VARIANT(PARSE_JSON('{"key": "value", "key1": null}')), object_construct('key', 'value', 'key1', null), array_construct(1,2,3,null); + +INSERT INTO "{{.schema}}"."COLUMN_MAPPINGS_TEST" + (_order, _int, _number, _decimal, _numeric, _integer, _bigint, _smallint, _tinyint, _float, _float4, _float8, _double, _real, _double_precision, _boolean, _text, _varchar, _char, _character, _string, _binary, _varbinary, _date, _datetime, _time, _timestamp, _timestampntz, _timestampltz, _timestamptz, _variant, _object, _array) +SELECT + 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, false, '', '', '', '', '', '', '', '2021-7-1', '2017-01-01 12:00:00', '12:00:00', '2014-01-01 16:00:00', '2014-01-01 16:00:00', '2014-01-01 16:00:00', '2014-01-01 16:00:00', 'string'::VARIANT, object_construct(), array_construct(); + + +INSERT INTO "{{.schema}}"."COLUMN_MAPPINGS_TEST" + (_order, _int, _number, _decimal, _numeric, _integer, _bigint, _smallint, _tinyint, _float, _float4, _float8, _double, _real, _double_precision, _boolean, _text, _varchar, _char, _character, _string, _binary, _varbinary, _date, _datetime, _time, _timestamp, _timestampntz, _timestampltz, _timestamptz, _variant, _object, _array) +SELECT + 3, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL; \ No newline at end of file diff --git a/sqlconnect/internal/snowflake/testdata/legacy-column-mapping-test-columns-sql.json b/sqlconnect/internal/snowflake/testdata/legacy-column-mapping-test-columns-sql.json new file mode 100644 index 0000000..711dca7 --- /dev/null +++ b/sqlconnect/internal/snowflake/testdata/legacy-column-mapping-test-columns-sql.json @@ -0,0 +1,35 @@ +{ + "_ORDER": "float", + "_INT": "float", + "_NUMBER": "float", + "_DECIMAL": "float", + "_NUMERIC": "float", + "_INTEGER": "float", + "_BIGINT": 
"float", + "_SMALLINT": "float", + "_TINYINT": "float", + "_FLOAT": "float", + "_FLOAT4": "float", + "_FLOAT8": "float", + "_DOUBLE": "float", + "_REAL": "float", + "_DOUBLE_PRECISION": "float", + "_BOOLEAN": "boolean", + "_TEXT": "string", + "_VARCHAR": "string", + "_CHAR": "string", + "_CHARACTER": "string", + "_STRING": "string", + "_BINARY": "string", + "_VARBINARY": "string", + "_DATE": "datetime", + "_DATETIME": "datetime", + "_TIME": "datetime", + "_TIMESTAMP": "datetime", + "_TIMESTAMPNTZ": "datetime", + "_TIMESTAMPLTZ": "datetime", + "_TIMESTAMPTZ": "datetime", + "_VARIANT": "json", + "_OBJECT": "OBJECT", + "_ARRAY": "ARRAY" +} \ No newline at end of file diff --git a/sqlconnect/internal/snowflake/testdata/legacy-column-mapping-test-columns-table.json b/sqlconnect/internal/snowflake/testdata/legacy-column-mapping-test-columns-table.json new file mode 100644 index 0000000..dcf73ec --- /dev/null +++ b/sqlconnect/internal/snowflake/testdata/legacy-column-mapping-test-columns-table.json @@ -0,0 +1,35 @@ +{ + "_ORDER": "int", + "_INT": "int", + "_NUMBER": "int", + "_DECIMAL": "int", + "_NUMERIC": "int", + "_INTEGER": "int", + "_BIGINT": "int", + "_SMALLINT": "int", + "_TINYINT": "int", + "_FLOAT": "float", + "_FLOAT4": "float", + "_FLOAT8": "float", + "_DOUBLE": "float", + "_REAL": "float", + "_DOUBLE_PRECISION": "float", + "_BOOLEAN": "boolean", + "_TEXT": "string", + "_VARCHAR": "string", + "_CHAR": "string", + "_CHARACTER": "string", + "_STRING": "string", + "_BINARY": "string", + "_VARBINARY": "string", + "_DATE": "datetime", + "_DATETIME": "datetime", + "_TIME": "datetime", + "_TIMESTAMP": "datetime", + "_TIMESTAMPNTZ": "datetime", + "_TIMESTAMPLTZ": "datetime", + "_TIMESTAMPTZ": "datetime", + "_VARIANT": "json", + "_OBJECT": "OBJECT", + "_ARRAY": "ARRAY" +} \ No newline at end of file diff --git a/sqlconnect/internal/snowflake/testdata/legacy-column-mapping-test-rows.json b/sqlconnect/internal/snowflake/testdata/legacy-column-mapping-test-rows.json new 
file mode 100644 index 0000000..a0fd51e --- /dev/null +++ b/sqlconnect/internal/snowflake/testdata/legacy-column-mapping-test-rows.json @@ -0,0 +1,107 @@ +[ + { + "_ORDER": 1, + "_INT": 1, + "_NUMBER": 1.1, + "_DECIMAL": 1.1, + "_NUMERIC": 1.1, + "_INTEGER": 1, + "_BIGINT": 1, + "_SMALLINT": 1, + "_TINYINT": 1, + "_FLOAT": 1.1, + "_FLOAT4": 1.1, + "_FLOAT8": 1.1, + "_DOUBLE": 1.1, + "_REAL": 1.1, + "_DOUBLE_PRECISION": 1.1, + "_BOOLEAN": true, + "_TEXT": "t", + "_VARCHAR": "vc", + "_CHAR": "c", + "_CHARACTER": "c", + "_STRING": "s", + "_BINARY": "Ymlu", + "_VARBINARY": "dmJpbg==", + "_DATE": "2021-07-01T00:00:00Z", + "_DATETIME": "2017-01-01T12:00:00Z", + "_TIME": "0001-01-01T12:00:00Z", + "_TIMESTAMP": "2014-01-01T16:00:00Z", + "_TIMESTAMPNTZ": "2014-01-01T16:00:00Z", + "_TIMESTAMPLTZ": "2014-01-01T16:00:00-08:00", + "_TIMESTAMPTZ": "2014-01-01T16:00:00-08:00", + "_VARIANT": "{\n \"key\": \"value\",\n \"key1\": null\n}", + "_OBJECT": { "key": "value"}, + "_ARRAY": [1, 2, 3, null] + }, + { + "_ORDER": 2, + "_INT": 0, + "_NUMBER": 0, + "_DECIMAL": 0, + "_NUMERIC": 0, + "_INTEGER": 0, + "_BIGINT": 0, + "_SMALLINT": 0, + "_TINYINT": 0, + "_FLOAT": 0, + "_FLOAT4": 0, + "_FLOAT8": 0, + "_DOUBLE": 0, + "_REAL": 0, + "_DOUBLE_PRECISION": 0, + "_BOOLEAN": false, + "_TEXT": "", + "_VARCHAR": "", + "_CHAR": "", + "_CHARACTER": "", + "_STRING": "", + "_BINARY": "", + "_VARBINARY": "", + "_DATE": "2021-07-01T00:00:00Z", + "_DATETIME": "2017-01-01T12:00:00Z", + "_TIME": "0001-01-01T12:00:00Z", + "_TIMESTAMP": "2014-01-01T16:00:00Z", + "_TIMESTAMPNTZ": "2014-01-01T16:00:00Z", + "_TIMESTAMPLTZ": "2014-01-01T16:00:00-08:00", + "_TIMESTAMPTZ": "2014-01-01T16:00:00-08:00", + "_VARIANT": "\"string\"", + "_OBJECT": {}, + "_ARRAY": [] + }, + { + "_ORDER": 3, + "_INT": null, + "_NUMBER": null, + "_DECIMAL": null, + "_NUMERIC": null, + "_INTEGER": null, + "_BIGINT": null, + "_SMALLINT": null, + "_TINYINT": null, + "_FLOAT": null, + "_FLOAT4": null, + "_FLOAT8": null, + "_DOUBLE": null, + 
"_REAL": null, + "_DOUBLE_PRECISION": null, + "_BOOLEAN": null, + "_TEXT": null, + "_VARCHAR": null, + "_CHAR": null, + "_CHARACTER": null, + "_STRING": null, + "_BINARY": null, + "_VARBINARY": null, + "_DATE": null, + "_DATETIME": null, + "_TIME": null, + "_TIMESTAMP": null, + "_TIMESTAMPNTZ": null, + "_TIMESTAMPLTZ": null, + "_TIMESTAMPTZ": null, + "_VARIANT": null, + "_OBJECT": null, + "_ARRAY": null + } +] \ No newline at end of file diff --git a/sqlconnect/internal/trino/config.go b/sqlconnect/internal/trino/config.go new file mode 100644 index 0000000..728ede7 --- /dev/null +++ b/sqlconnect/internal/trino/config.go @@ -0,0 +1,54 @@ +package trino + +import ( + "encoding/json" + "fmt" + "net/url" + + "github.com/trinodb/trino-go-client/trino" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/util" +) + +type Config struct { + Host string `json:"host"` + Port int `json:"port"` + Catalog string `json:"catalog"` + User string `json:"user"` + Password string `json:"password"` + + // RudderSchema is used to override the default rudder schema name during tests + RudderSchema string `json:"rudderSchema"` +} + +func (c Config) ConnectionString() (string, error) { + uri := func() string { + hostport := c.Host + if c.Port != 0 { + hostport = fmt.Sprintf("%s:%v", c.Host, c.Port) + } + uri := url.URL{ + Scheme: "https", + User: url.UserPassword(c.User, c.Password), + Host: hostport, + } + return uri.String() + }() + config := trino.Config{ + ServerURI: uri, + Catalog: c.Catalog, + } + dsn, err := config.FormatDSN() + if err != nil { + return "", fmt.Errorf("formatting dsn: %w", err) + } + return dsn, nil +} + +func (c *Config) Parse(input json.RawMessage) error { + err := json.Unmarshal(input, c) + if err != nil { + return err + } + return util.ValidateHost(c.Host) +} diff --git a/sqlconnect/internal/trino/db.go b/sqlconnect/internal/trino/db.go new file mode 100644 index 0000000..3db24ee --- /dev/null +++ b/sqlconnect/internal/trino/db.go @@ -0,0 +1,84 @@ 
+package trino + +import ( + "context" + "database/sql" + "encoding/json" + "fmt" + + "github.com/samber/lo" + _ "github.com/trinodb/trino-go-client/trino" // trino driver + + "github.com/rudderlabs/sqlconnect-go/sqlconnect" + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/base" +) + +const ( + DatabaseType = "trino" + defaultRudderSchema = "_rudderstack" +) + +// NewDB creates a new postgres-specific client +func NewDB(configJSON json.RawMessage) (*DB, error) { + var config Config + err := config.Parse(configJSON) + if err != nil { + return nil, err + } + + dsn, err := config.ConnectionString() + if err != nil { + return nil, err + } + db, err := sql.Open(DatabaseType, dsn) + if err != nil { + return nil, err + } + + return &DB{ + DB: base.NewDB( + db, + lo.Ternary(config.RudderSchema != "", config.RudderSchema, defaultRudderSchema), + base.WithColumnTypeMapper(columnTypeMapper), + base.WithJsonRowMapper(jsonRowMapper), + base.WithSQLCommandsOverride(func(cmds base.SQLCommands) base.SQLCommands { + cmds.ListTables = func(schema string) []lo.Tuple2[string, string] { + return []lo.Tuple2[string, string]{ + {A: fmt.Sprintf("SHOW TABLES FROM %[1]s", schema), B: "tableName"}, + } + } + cmds.ListTablesWithPrefix = func(schema, prefix string) []lo.Tuple2[string, string] { + return []lo.Tuple2[string, string]{ + {A: fmt.Sprintf("SHOW TABLES FROM %[1]s LIKE '%[2]s'", schema, prefix+"%"), B: "tableName"}, + } + } + cmds.TableExists = func(schema, table string) string { + return fmt.Sprintf("SHOW TABLES FROM %[1]s LIKE '%[2]s'", schema, table) + } + cmds.TruncateTable = func(table string) string { + return fmt.Sprintf("DELETE FROM %[1]s", table) + } + return cmds + }), + ), + }, nil +} + +func init() { + sqlconnect.RegisterDBFactory(DatabaseType, func(credentialsJSON json.RawMessage) (sqlconnect.DB, error) { + return NewDB(credentialsJSON) + }) +} + +type DB struct { + *base.DB +} + +func (db *DB) Ping() error { + return db.PingContext(context.Background()) +} + 
+func (db *DB) PingContext(ctx context.Context) error { + _, err := db.ExecContext(ctx, "select 1") + return err +} diff --git a/sqlconnect/internal/trino/integration_test.go b/sqlconnect/internal/trino/integration_test.go new file mode 100644 index 0000000..0064986 --- /dev/null +++ b/sqlconnect/internal/trino/integration_test.go @@ -0,0 +1,20 @@ +package trino_test + +import ( + "os" + "strings" + "testing" + + integrationtest "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/integration_test" + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/trino" +) + +func TestTrinoDB(t *testing.T) { + t.Setenv("TZ", "UTC") // set timezone to UTC for consistent datetime tests + configJSON, ok := os.LookupEnv("TRINO_TEST_ENVIRONMENT_CREDENTIALS") + if !ok { + t.Skip("skipping trino integration test due to lack of a test environment") + } + + integrationtest.TestDatabaseScenarios(t, trino.DatabaseType, []byte(configJSON), strings.ToLower, integrationtest.Options{}) +} diff --git a/sqlconnect/internal/trino/mappings.go b/sqlconnect/internal/trino/mappings.go new file mode 100644 index 0000000..5321b92 --- /dev/null +++ b/sqlconnect/internal/trino/mappings.go @@ -0,0 +1,75 @@ +package trino + +import ( + "regexp" + "strconv" + "strings" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/base" +) + +// mapping of database column types to rudder types +var columnTypeMappings = map[string]string{ + "BOOLEAN": "boolean", + "TINYINT": "int", + "SMALLINT": "int", + "INTEGER": "int", + "INT": "int", + "BIGINT": "int", + + "REAL": "float", + "DOUBLE": "float", + "DECIMAL": "float", + + "VARCHAR": "string", + "CHAR": "string", + "VARBINARY": "string", + + "DATE": "datetime", + "TIME": "datetime", + "TIMESTAMP": "datetime", + "TIME WITH TIME ZONE": "datetime", + "TIMESTAMP WITH TIME ZONE": "datetime", + + "JSON": "json", + "ARRAY": "json", + "MAP": "json", +} + +var re = regexp.MustCompile(`(\(.+\)|<.+>)`) // remove type parameters [<>] and size constraints [()] + 
+func columnTypeMapper(columnType base.ColumnType) string { + databaseTypeName := strings.ToUpper(re.ReplaceAllString(columnType.DatabaseTypeName(), "")) + if mappedType, ok := columnTypeMappings[strings.ToUpper(databaseTypeName)]; ok { + return mappedType + } + + // TODO: is this still needed? + if strings.Contains(databaseTypeName, "CHAR") || strings.Contains(databaseTypeName, "VARCHAR") { + return "string" + } else if strings.Contains(databaseTypeName, "TIMESTAMP") { + return "datetime" + } else if strings.Contains(databaseTypeName, "DECIMAL") { + return "float" + } + return databaseTypeName +} + +// jsonRowMapper maps a row's scanned column to a json object's field +func jsonRowMapper(databaseTypeName string, value any) any { + switch databaseTypeName { + case "DECIMAL": + switch v := value.(type) { + case string: + if value, err := strconv.ParseFloat(v, 64); err == nil { + return value + } + } + default: + switch v := value.(type) { + case []byte: + return string(v) + } + } + return value +} diff --git a/sqlconnect/internal/trino/testdata/column-mapping-test-columns.json b/sqlconnect/internal/trino/testdata/column-mapping-test-columns.json new file mode 100644 index 0000000..bf0c796 --- /dev/null +++ b/sqlconnect/internal/trino/testdata/column-mapping-test-columns.json @@ -0,0 +1,19 @@ +{ + "_order": "int", + "_int": "int", + "_tinyint": "int", + "_smallint": "int", + "_integer": "int", + "_bigint": "int", + "_real": "float", + "_double": "float", + "_decimal": "float", + "_varchar": "string", + "_char": "string", + "_varbinary": "string", + "_boolean": "boolean", + "_date": "datetime", + "_timestamp": "datetime", + "_array": "json", + "_map": "json" +} \ No newline at end of file diff --git a/sqlconnect/internal/trino/testdata/column-mapping-test-rows.json b/sqlconnect/internal/trino/testdata/column-mapping-test-rows.json new file mode 100644 index 0000000..5ea7ae3 --- /dev/null +++ b/sqlconnect/internal/trino/testdata/column-mapping-test-rows.json @@ -0,0 
+1,69 @@ +[ + { + "_order": 1, + "_int": 1, + "_tinyint": 1, + "_smallint": 1, + "_integer": 1, + "_bigint": 1, + "_real": 1.1, + "_double": 1.1, + "_decimal": 1.1, + "_varchar": "abc", + "_char": "abc", + "_varbinary": "YWJj", + "_boolean": true, + "_date": "2004-10-19T00:00:00Z", + "_timestamp": "2004-10-19T10:23:54Z", + "_array": [ + 1, + 2, + 3 + ], + "_map": { + "bar": 2, + "foo": 1 + } + }, + { + "_order": 2, + "_int": 0, + "_tinyint": 0, + "_smallint": 0, + "_integer": 0, + "_bigint": 0, + "_real": 0, + "_double": 0, + "_decimal": 0, + "_varchar": "", + "_char": " ", + "_varbinary": "", + "_boolean": false, + "_date": "2004-10-19T00:00:00Z", + "_timestamp": "2004-10-19T10:23:54Z", + "_array": [], + "_map": { + "bar": 2, + "foo": 1 + } + }, + { + "_order": 3, + "_int": null, + "_tinyint": null, + "_smallint": null, + "_integer": null, + "_bigint": null, + "_real": null, + "_double": null, + "_decimal": null, + "_varchar": null, + "_char": null, + "_varbinary": null, + "_boolean": null, + "_date": null, + "_timestamp": null, + "_array": null, + "_map": null + } +] \ No newline at end of file diff --git a/sqlconnect/internal/trino/testdata/column-mapping-test-seed.sql b/sqlconnect/internal/trino/testdata/column-mapping-test-seed.sql new file mode 100644 index 0000000..0fca1b1 --- /dev/null +++ b/sqlconnect/internal/trino/testdata/column-mapping-test-seed.sql @@ -0,0 +1,26 @@ +CREATE TABLE "{{.schema}}"."column_mappings_test" ( + _order INT, + _int INT, + _tinyint TINYINT, + _smallint SMALLINT, + _integer INTEGER, + _bigint BIGINT, + _real REAL, + _double DOUBLE, + _decimal DECIMAL(2,1), + _varchar VARCHAR(3), + _char CHAR(3), + _varbinary VARBINARY, + _boolean BOOLEAN, + _date DATE, + _timestamp TIMESTAMP, + _array ARRAY, + _map MAP +); + +INSERT INTO "{{.schema}}"."column_mappings_test" + (_order, _int, _tinyint, _smallint, _integer, _bigint, _real, _double, _decimal, _varchar, _char, _varbinary, _boolean, _date, _timestamp, _array, _map) +VALUES + (1, 1, 
// ValidateHost checks if the hostname is resolvable and that it doesn't correspond to localhost.
//
// Resolution failures are reported as errors. A host is rejected when any of
// its resolved addresses is a loopback or unspecified address.
func ValidateHost(hostname string) error {
	addrs, err := net.LookupHost(hostname)
	if err != nil {
		return fmt.Errorf("error looking up hostname %s: %v", hostname, err)
	}

	for _, addr := range addrs {
		// Reject loopback (127.0.0.0/8, ::1) and unspecified (0.0.0.0, ::)
		// addresses. The previous implementation compared against the two
		// literal strings "127.0.0.1" and "0.0.0.0" only, which was trivially
		// bypassed with e.g. "127.0.0.2" or "::1".
		if ip := net.ParseIP(addr); ip != nil && (ip.IsLoopback() || ip.IsUnspecified()) {
			return fmt.Errorf("invalid host name in credentials")
		}
	}
	return nil
}
*testing.T) { + err := util.ValidateHost("localhost") + require.Error(t, err) + }) +} diff --git a/sqlconnect/querydef.go b/sqlconnect/querydef.go new file mode 100644 index 0000000..fc22541 --- /dev/null +++ b/sqlconnect/querydef.go @@ -0,0 +1,56 @@ +package sqlconnect + +import ( + "fmt" + "strings" + + "github.com/samber/lo" +) + +// QueryDef describes a query that consists of a table and columns that should be queried. +type QueryDef struct { + Table RelationRef `json:"table"` // Reference to table that should be queried + Columns []string `json:"columns,omitempty"` // Columns that should be included. Defaults to "*" if nil or empty. + Conditions []*QueryCondition `json:"conditions,omitempty"` // Conditions is a list of query conditions. + OrderBy *QueryOrder `json:"order_by,omitempty"` // OrderBy defines the query's order by clause. +} + +// QueryCondition defines a query condition. +type QueryCondition struct { + Column string `json:"column,omitempty"` + Operator string `json:"operator,omitempty"` + Value string `json:"value,omitempty"` +} + +// QueryOrder defines the query's order by clause.This only supports one order by column. 
+type QueryOrder struct { + Column string // the order by column + Order string // supported values are ('ASC', 'DESC') +} + +func (query *QueryDef) ToSQL(d Dialect) string { + var cols string + if query.Columns == nil || len(query.Columns) == 0 { + cols = "*" + } else { + for i, column := range query.Columns { + cols += d.QuoteIdentifier(column) + if i < len(query.Columns)-1 { + cols += "," + } + } + } + // create data query + sql := fmt.Sprintf("SELECT %s FROM %s", cols, d.QuoteTable(query.Table)) + // add condition clauses + if len(query.Conditions) > 0 { + sql += " WHERE " + strings.Join(lo.Map(query.Conditions, func(condition *QueryCondition, _ int) string { + return fmt.Sprintf(`%s %s %s`, d.QuoteIdentifier(condition.Column), condition.Operator, condition.Value) + }), " AND ") + } + // add order by clause + if query.OrderBy != nil { + sql += fmt.Sprintf(` ORDER BY %s %s`, d.QuoteIdentifier(query.OrderBy.Column), query.OrderBy.Order) + } + return sql +} diff --git a/sqlconnect/querydef_test.go b/sqlconnect/querydef_test.go new file mode 100644 index 0000000..1b6f3ca --- /dev/null +++ b/sqlconnect/querydef_test.go @@ -0,0 +1,64 @@ +package sqlconnect_test + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/rudderlabs/sqlconnect-go/sqlconnect" +) + +func TestQueryDef(t *testing.T) { + t.Run("with columns", func(t *testing.T) { + table := sqlconnect.NewRelationRef("table") + q := sqlconnect.QueryDef{ + Table: table, + Columns: []string{"col1", "col2"}, + Conditions: []*sqlconnect.QueryCondition{ + {Column: "col1", Operator: "=", Value: "'1'"}, + {Column: "col2", Operator: ">", Value: "2"}, + }, + OrderBy: &sqlconnect.QueryOrder{ + Column: "col1", + Order: "ASC", + }, + } + + sql := q.ToSQL(testDialect{}) + expected := `SELECT "col1","col2" FROM "table" WHERE "col1" = '1' AND "col2" > 2 ORDER BY "col1" ASC` + require.Equal(t, expected, sql, "query should be formatted correctly") + }) + + t.Run("without columns", func(t 
// NewRelationRef creates a RelationRef from a name and optional schema,
// catalog and relation-type options. The relation type defaults to
// TableRelation when no type option is supplied.
func NewRelationRef(name string, options ...Option) RelationRef {
	var opts RelationRefOption
	for _, apply := range options {
		apply(&opts)
	}

	ref := RelationRef{
		Name:    name,
		Schema:  opts.Schema,
		Catalog: opts.Catalog,
		Type:    opts.Type,
	}
	if ref.Type == "" {
		ref.Type = TableRelation
	}
	return ref
}

// RelationType distinguishes between kinds of database relations.
type RelationType string

const (
	TableRelation RelationType = "table"
	ViewRelation  RelationType = "view"
)

// RelationRef provides a reference to a database table
type RelationRef struct {
	Name    string       `json:"name"`              // the relation's name
	Schema  string       `json:"schema,omitempty"`  // the relation's schema
	Catalog string       `json:"catalog,omitempty"` // the relation's catalog
	Type    RelationType `json:"type,omitempty"`    // the relation's type
}

// String returns the dotted representation of the reference, including
// catalog and schema qualifiers when present.
func (r *RelationRef) String() string {
	switch {
	case r.Catalog != "" && r.Schema != "":
		return r.Catalog + "." + r.Schema + "." + r.Name
	case r.Schema != "":
		return r.Schema + "." + r.Name
	default:
		return r.Name
	}
}

// UnmarshalJSON decodes a RelationRef, defaulting the relation type to
// TableRelation when the payload omits it.
func (r *RelationRef) UnmarshalJSON(data []byte) error {
	var raw struct {
		Name    string       `json:"name"`
		Schema  string       `json:"schema,omitempty"`
		Catalog string       `json:"catalog,omitempty"`
		Type    RelationType `json:"type"`
	}
	if err := json.Unmarshal(data, &raw); err != nil {
		return fmt.Errorf("failed to unmarshal RelationRef: %w", err)
	}
	if raw.Type == "" {
		raw.Type = TableRelation
	}

	*r = NewRelationRef(raw.Name, WithSchema(raw.Schema), WithCatalog(raw.Catalog), WithRelationType(raw.Type))
	return nil
}

// Option configures a RelationRefOption during NewRelationRef.
type Option func(options *RelationRefOption)

// RelationRefOption collects the optional attributes of a RelationRef.
type RelationRefOption struct {
	Schema  string
	Catalog string
	Type    RelationType
}

// WithSchema sets the relation's schema.
func WithSchema(schema string) Option {
	return func(options *RelationRefOption) {
		options.Schema = schema
	}
}

// WithCatalog sets the relation's catalog.
func WithCatalog(catalog string) Option {
	return func(options *RelationRefOption) {
		options.Catalog = catalog
	}
}

// WithRelationType sets the relation's type.
func WithRelationType(relationType RelationType) Option {
	return func(options *RelationRefOption) {
		options.Type = relationType
	}
}
// SchemaRef provides a reference to a database schema
type SchemaRef struct {
	// Name is the schema's name
	Name string `json:"name"`
}

// String implements fmt.Stringer by returning the schema's name.
func (s SchemaRef) String() string {
	return s.Name
}