From 30ac6a07b0b084c9726e39a1ad864f7eb67f100b Mon Sep 17 00:00:00 2001 From: Benoit Perigaud <8754100+b-per@users.noreply.github.com> Date: Wed, 23 Oct 2024 11:54:22 +0200 Subject: [PATCH 1/4] Set versionless as default for envs --- docs/resources/environment.md | 6 +++--- examples/resources/dbtcloud_environment/resource.tf | 4 ++-- pkg/sdkv2/resources/environment.go | 5 +++-- 3 files changed, 8 insertions(+), 7 deletions(-) diff --git a/docs/resources/environment.md b/docs/resources/environment.md index 5a3f3c0..23f2dd2 100644 --- a/docs/resources/environment.md +++ b/docs/resources/environment.md @@ -22,7 +22,7 @@ This version of the provider has the `connection_id` as an optional field but it ```terraform resource "dbtcloud_environment" "ci_environment" { - // the dbt_version is major.minor.0-latest , major.minor.0-pre or versionless (Beta on 15 Feb 2024, to always be on the latest dbt version) + // the dbt_version is major.minor.0-latest , major.minor.0-pre or versionless (by default, it is set to versionless if not configured) dbt_version = "versionless" name = "CI" project_id = dbtcloud_project.dbt_project.id @@ -48,7 +48,7 @@ resource "dbtcloud_environment" "dev_environment" { name = "Dev" project_id = dbtcloud_project.dbt_project.id type = "development" - connection_id = dbtcloud_global_connection.my_other_global_connection + connection_id = dbtcloud_global_connection.my_other_global_connection.id } ``` @@ -57,7 +57,6 @@ resource "dbtcloud_environment" "dev_environment" { ### Required -- `dbt_version` (String) Version number of dbt to use in this environment. It needs to be in the format `major.minor.0-latest` (e.g. `1.5.0-latest`), `major.minor.0-pre` or `versionless`. 
In a future version of the provider `versionless` will be the default if no version is provided - `name` (String) Environment name - `project_id` (Number) Project ID to create the environment in - `type` (String) The type of environment (must be either development or deployment) @@ -71,6 +70,7 @@ resource "dbtcloud_environment" "dev_environment" { - To avoid Terraform state issues, when using this field, the `dbtcloud_project_connection` resource should be removed from the project or you need to make sure that the `connection_id` is the same in `dbtcloud_project_connection` and in the `connection_id` of the Development environment of the project - `credential_id` (Number) Credential ID to create the environment with. A credential is not required for development environments but is required for deployment environments - `custom_branch` (String) Which custom branch to use in this environment +- `dbt_version` (String) Version number of dbt to use in this environment. It needs to be in the format `major.minor.0-latest` (e.g. `1.5.0-latest`), `major.minor.0-pre` or `versionless`. Defaults to `versionless` if no version is provided - `deployment_type` (String) The type of environment. 
Only valid for environments of type 'deployment' and for now can only be 'production', 'staging' or left empty for generic environments - `extended_attributes_id` (Number) ID of the extended attributes for the environment - `is_active` (Boolean) Whether the environment is active diff --git a/examples/resources/dbtcloud_environment/resource.tf b/examples/resources/dbtcloud_environment/resource.tf index ac6d7f1..59e69b1 100644 --- a/examples/resources/dbtcloud_environment/resource.tf +++ b/examples/resources/dbtcloud_environment/resource.tf @@ -1,5 +1,5 @@ resource "dbtcloud_environment" "ci_environment" { - // the dbt_version is major.minor.0-latest , major.minor.0-pre or versionless (Beta on 15 Feb 2024, to always be on the latest dbt version) + // the dbt_version is major.minor.0-latest , major.minor.0-pre or versionless (by default, it is set to versionless if not configured) dbt_version = "versionless" name = "CI" project_id = dbtcloud_project.dbt_project.id @@ -25,5 +25,5 @@ resource "dbtcloud_environment" "dev_environment" { name = "Dev" project_id = dbtcloud_project.dbt_project.id type = "development" - connection_id = dbtcloud_global_connection.my_other_global_connection + connection_id = dbtcloud_global_connection.my_other_global_connection.id } diff --git a/pkg/sdkv2/resources/environment.go b/pkg/sdkv2/resources/environment.go index b55321d..007174a 100644 --- a/pkg/sdkv2/resources/environment.go +++ b/pkg/sdkv2/resources/environment.go @@ -52,8 +52,9 @@ func ResourceEnvironment() *schema.Resource { }, "dbt_version": { Type: schema.TypeString, - Required: true, - Description: "Version number of dbt to use in this environment. It needs to be in the format `major.minor.0-latest` (e.g. `1.5.0-latest`), `major.minor.0-pre` or `versionless`. In a future version of the provider `versionless` will be the default if no version is provided", + Optional: true, + Default: "versionless", + Description: "Version number of dbt to use in this environment. 
It needs to be in the format `major.minor.0-latest` (e.g. `1.5.0-latest`), `major.minor.0-pre` or `versionless`. Defaults to `versionless` if no version is provided", }, "type": { Type: schema.TypeString, From f55a1a89de49abf390529f584fdc62157ff1fa66 Mon Sep 17 00:00:00 2001 From: Benoit Perigaud <8754100+b-per@users.noreply.github.com> Date: Wed, 23 Oct 2024 11:55:38 +0200 Subject: [PATCH 2/4] Add ability to compare changes in CI jobs --- docs/data-sources/job.md | 1 + docs/data-sources/jobs.md | 1 + docs/resources/job.md | 1 + pkg/dbt_cloud/job.go | 3 +++ pkg/framework/objects/job/data_source_all.go | 1 + pkg/framework/objects/job/model.go | 1 + pkg/framework/objects/job/schema.go | 4 ++++ pkg/sdkv2/data_sources/job.go | 8 ++++++++ pkg/sdkv2/resources/job.go | 21 ++++++++++++++++++-- pkg/sdkv2/resources/job_acceptance_test.go | 14 +++++++++++-- 10 files changed, 51 insertions(+), 4 deletions(-) diff --git a/docs/data-sources/job.md b/docs/data-sources/job.md index 1f84511..2b56527 100644 --- a/docs/data-sources/job.md +++ b/docs/data-sources/job.md @@ -29,6 +29,7 @@ description: |- - `id` (String) The ID of this resource. - `job_completion_trigger_condition` (Set of Object) Which other job should trigger this job when it finishes, and on which conditions. (see [below for nested schema](#nestedatt--job_completion_trigger_condition)) - `name` (String) Given name for the job +- `run_compare_changes` (Boolean) Whether the CI job should compare data changes introduced by the code change in the PR. 
- `self_deferring` (Boolean) Whether this job defers on a previous run of itself (overrides value in deferring_job_id) - `timeout_seconds` (Number) Number of seconds before the job times out - `triggers` (Map of Boolean) Flags for which types of triggers to use, keys of github_webhook, git_provider_webhook, schedule, on_merge diff --git a/docs/data-sources/jobs.md b/docs/data-sources/jobs.md index 5c49c29..26a2091 100644 --- a/docs/data-sources/jobs.md +++ b/docs/data-sources/jobs.md @@ -61,6 +61,7 @@ Read-Only: - `job_type` (String) The type of job (e.g. CI, scheduled) - `name` (String) The name of the job - `project_id` (Number) The ID of the project +- `run_compare_changes` (Boolean) Whether the job should compare data changes introduced by the code change in the PR - `run_generate_sources` (Boolean) Whether the job test source freshness - `schedule` (Attributes) (see [below for nested schema](#nestedatt--jobs--schedule)) - `settings` (Attributes) (see [below for nested schema](#nestedatt--jobs--settings)) diff --git a/docs/resources/job.md b/docs/resources/job.md index 4a5119e..ad22163 100644 --- a/docs/resources/job.md +++ b/docs/resources/job.md @@ -122,6 +122,7 @@ resource "dbtcloud_job" "downstream_job" { - `is_active` (Boolean) Should always be set to true as setting it to false is the same as creating a job in a deleted state. To create/keep a job in a 'deactivated' state, check the `triggers` config. - `job_completion_trigger_condition` (Block Set, Max: 1) Which other job should trigger this job when it finishes, and on which conditions (sometimes referred as 'job chaining'). (see [below for nested schema](#nestedblock--job_completion_trigger_condition)) - `num_threads` (Number) Number of threads to use in the job +- `run_compare_changes` (Boolean) Whether the CI job should compare data changes introduced by the code changes. Requires `deferring_environment_id` to be set. 
(Advanced CI needs to be activated in the dbt Cloud Account Settings first as well) - `run_generate_sources` (Boolean) Flag for whether the job should add a `dbt source freshness` step to the job. The difference between manually adding a step with `dbt source freshness` in the job steps or using this flag is that with this flag, a failed freshness will still allow the following steps to run. - `schedule_cron` (String) Custom cron expression for schedule - `schedule_days` (List of Number) List of days of week as numbers (0 = Sunday, 7 = Saturday) to execute the job at if running on a schedule diff --git a/pkg/dbt_cloud/job.go b/pkg/dbt_cloud/job.go index 53724f3..f90f146 100644 --- a/pkg/dbt_cloud/job.go +++ b/pkg/dbt_cloud/job.go @@ -78,6 +78,7 @@ type Job struct { Execution JobExecution `json:"execution"` TriggersOnDraftPR bool `json:"triggers_on_draft_pr"` JobCompletionTrigger *JobCompletionTrigger `json:"job_completion_trigger_condition"` + RunCompareChanges bool `json:"run_compare_changes"` } type JobWithEnvironment struct { @@ -133,6 +134,7 @@ func (c *Client) CreateJob( timeoutSeconds int, triggersOnDraftPR bool, jobCompletionTriggerCondition map[string]any, + runCompareChanges bool, ) (*Job, error) { state := STATE_ACTIVE if !isActive { @@ -232,6 +234,7 @@ func (c *Client) CreateJob( TriggersOnDraftPR: triggersOnDraftPR, JobCompletionTrigger: jobCompletionTrigger, JobType: jobType, + RunCompareChanges: runCompareChanges, } if dbtVersion != "" { newJob.Dbt_Version = &dbtVersion diff --git a/pkg/framework/objects/job/data_source_all.go b/pkg/framework/objects/job/data_source_all.go index 149e4fd..6b9d2bd 100644 --- a/pkg/framework/objects/job/data_source_all.go +++ b/pkg/framework/objects/job/data_source_all.go @@ -131,6 +131,7 @@ func (d *jobsDataSource) Read( }, JobType: types.StringValue(job.JobType), TriggersOnDraftPr: types.BoolValue(job.TriggersOnDraftPR), + RunCompareChanges: types.BoolValue(job.RunCompareChanges), Environment: JobEnvironment{ 
ProjectID: types.Int64Value(int64(job.Environment.Project_Id)), ID: types.Int64Value(int64(*job.Environment.ID)), diff --git a/pkg/framework/objects/job/model.go b/pkg/framework/objects/job/model.go index 817e29e..3b7c3d3 100644 --- a/pkg/framework/objects/job/model.go +++ b/pkg/framework/objects/job/model.go @@ -66,4 +66,5 @@ type JobDataSourceModel struct { TriggersOnDraftPr types.Bool `tfsdk:"triggers_on_draft_pr"` Environment JobEnvironment `tfsdk:"environment"` JobCompletionTriggerCondition *JobCompletionTrigger `tfsdk:"job_completion_trigger_condition"` + RunCompareChanges types.Bool `tfsdk:"run_compare_changes"` } diff --git a/pkg/framework/objects/job/schema.go b/pkg/framework/objects/job/schema.go index 0a58318..dc8be8f 100644 --- a/pkg/framework/objects/job/schema.go +++ b/pkg/framework/objects/job/schema.go @@ -77,6 +77,10 @@ func (d *jobsDataSource) Schema( Computed: true, Description: "Whether the job test source freshness", }, + "run_compare_changes": schema.BoolAttribute{ + Computed: true, + Description: "Whether the job should compare data changes introduced by the code change in the PR", + }, "id": schema.Int64Attribute{ Computed: true, Description: "The ID of the job", diff --git a/pkg/sdkv2/data_sources/job.go b/pkg/sdkv2/data_sources/job.go index 1a37e35..624ff8b 100644 --- a/pkg/sdkv2/data_sources/job.go +++ b/pkg/sdkv2/data_sources/job.go @@ -98,6 +98,11 @@ var jobSchema = map[string]*schema.Schema{ }, Description: "Which other job should trigger this job when it finishes, and on which conditions.", }, + "run_compare_changes": { + Type: schema.TypeBool, + Computed: true, + Description: "Whether the CI job should compare data changes introduced by the code change in the PR.", + }, } func DatasourceJob() *schema.Resource { @@ -184,6 +189,9 @@ func datasourceJobRead( return diag.FromErr(err) } } + if err := d.Set("run_compare_changes", job.RunCompareChanges); err != nil { + return diag.FromErr(err) + } d.SetId(jobId) diff --git 
a/pkg/sdkv2/resources/job.go b/pkg/sdkv2/resources/job.go index fc48887..af46624 100644 --- a/pkg/sdkv2/resources/job.go +++ b/pkg/sdkv2/resources/job.go @@ -198,6 +198,13 @@ var jobSchema = map[string]*schema.Schema{ }, Description: "Which other job should trigger this job when it finishes, and on which conditions (sometimes referred as 'job chaining').", }, + "run_compare_changes": { + Type: schema.TypeBool, + Optional: true, + Default: false, + // Once on the plugin framework, put a validation to check that `deferring_environment_id` is set + Description: "Whether the CI job should compare data changes introduced by the code changes. Requires `deferring_environment_id` to be set. (Advanced CI needs to be activated in the dbt Cloud Account Settings first as well)", + }, } func ResourceJob() *schema.Resource { @@ -390,6 +397,9 @@ func resourceJobRead(ctx context.Context, d *schema.ResourceData, m interface{}) return diag.FromErr(err) } } + if err := d.Set("run_compare_changes", job.RunCompareChanges); err != nil { + return diag.FromErr(err) + } return diags } @@ -426,6 +436,7 @@ func resourceJobCreate( selfDeferring := d.Get("self_deferring").(bool) timeoutSeconds := d.Get("timeout_seconds").(int) triggersOnDraftPR := d.Get("triggers_on_draft_pr").(bool) + runCompareChanges := d.Get("run_compare_changes").(bool) var jobCompletionTrigger map[string]any empty, completionJobID, completionProjectID, completionStatuses := utils.ExtractJobConditionSet( @@ -476,6 +487,7 @@ func resourceJobCreate( timeoutSeconds, triggersOnDraftPR, jobCompletionTrigger, + runCompareChanges, ) if err != nil { return diag.FromErr(err) @@ -516,8 +528,9 @@ func resourceJobUpdate( d.HasChange("deferring_environment_id") || d.HasChange("self_deferring") || d.HasChange("timeout_seconds") || - d.HasChange("triggers_on_drat_pr") || - d.HasChange("job_completion_trigger_condition") { + d.HasChange("triggers_on_draft_pr") || + d.HasChange("job_completion_trigger_condition") || + 
d.HasChange("run_compare_changes") { job, err := c.GetJob(jobId) if err != nil { return diag.FromErr(err) @@ -690,6 +703,10 @@ func resourceJobUpdate( job.JobCompletionTrigger = &jobCondTrigger } } + if d.HasChange("run_compare_changes") { + runCompareChanges := d.Get("run_compare_changes").(bool) + job.RunCompareChanges = runCompareChanges + } _, err = c.UpdateJob(jobId, *job) if err != nil { diff --git a/pkg/sdkv2/resources/job_acceptance_test.go b/pkg/sdkv2/resources/job_acceptance_test.go index 0079dea..14b3b06 100644 --- a/pkg/sdkv2/resources/job_acceptance_test.go +++ b/pkg/sdkv2/resources/job_acceptance_test.go @@ -580,9 +580,17 @@ func testAccDbtCloudJobResourceBasicConfigTriggers( git_trigger := "false" schedule_trigger := "false" on_merge_trigger := "false" + run_compare_changes := "false" + deferringConfig := "" if trigger == "git" { git_trigger = "true" + deferringConfig = "deferring_environment_id = dbtcloud_environment.test_job_environment.environment_id" + if !isDbtCloudPR() { + // we don't want to activate it in Cloud PRs as the setting need to be ON + // TODO: When TF supports account settings, activate the setting in this test and remove this logic + run_compare_changes = "true" + } } if trigger == "schedule" { schedule_trigger = "true" @@ -600,7 +608,7 @@ resource "dbtcloud_environment" "test_job_environment" { project_id = dbtcloud_project.test_job_project.id name = "%s" dbt_version = "%s" - type = "development" + type = "deployment" } resource "dbtcloud_job" "test_job" { @@ -616,8 +624,10 @@ resource "dbtcloud_job" "test_job" { "schedule": %s, "on_merge": %s } + run_compare_changes = %s + %s } -`, projectName, environmentName, DBT_CLOUD_VERSION, jobName, git_trigger, git_trigger, schedule_trigger, on_merge_trigger) +`, projectName, environmentName, DBT_CLOUD_VERSION, jobName, git_trigger, git_trigger, schedule_trigger, on_merge_trigger, run_compare_changes, deferringConfig) } func testAccCheckDbtCloudJobExists(resource string) 
resource.TestCheckFunc { From a722b8b0fa166ed3bc98b47fdde97707d4435452 Mon Sep 17 00:00:00 2001 From: Benoit Perigaud <8754100+b-per@users.noreply.github.com> Date: Wed, 23 Oct 2024 11:55:58 +0200 Subject: [PATCH 3/4] Fix DBX credentials with global connections --- docs/resources/databricks_credential.md | 18 +- .../resource.tf | 14 +- pkg/dbt_cloud/databricks_credential.go | 245 +++++++++++++++++- pkg/sdkv2/resources/databricks_credential.go | 187 ++++++++++++- .../databricks_credential_acceptance_test.go | 143 +++++++++- 5 files changed, 585 insertions(+), 22 deletions(-) diff --git a/docs/resources/databricks_credential.md b/docs/resources/databricks_credential.md index 8896028..bf1699b 100644 --- a/docs/resources/databricks_credential.md +++ b/docs/resources/databricks_credential.md @@ -13,11 +13,20 @@ description: |- ## Example Usage ```terraform -# when using the Databricks adapter +# when using the Databricks adapter with a new `dbtcloud_global_connection` +# we don't provide an `adapter_id` +resource "dbtcloud_databricks_credential" "my_databricks_cred" { + project_id = dbtcloud_project.dbt_project.id + token = "abcdefgh" + schema = "my_schema" + adapter_type = "databricks" +} + +# when using the Databricks adapter with a legacy `dbtcloud_connection` +# we provide an `adapter_id` resource "dbtcloud_databricks_credential" "my_databricks_cred" { project_id = dbtcloud_project.dbt_project.id adapter_id = dbtcloud_connection.my_databricks_connection.adapter_id - target_name = "prod" token = "abcdefgh" schema = "my_schema" adapter_type = "databricks" @@ -27,7 +36,6 @@ resource "dbtcloud_databricks_credential" "my_databricks_cred" { resource "dbtcloud_databricks_credential" "my_spark_cred" { project_id = dbtcloud_project.dbt_project.id adapter_id = dbtcloud_connection.my_databricks_connection.adapter_id - target_name = "prod" token = "abcdefgh" schema = "my_schema" adapter_type = "spark" @@ -39,7 +47,6 @@ resource "dbtcloud_databricks_credential" "my_spark_cred" { 
### Required -- `adapter_id` (Number) Databricks adapter ID for the credential - `adapter_type` (String) The type of the adapter (databricks or spark) - `project_id` (Number) Project ID to create the Databricks credential in - `schema` (String) The schema where to create models @@ -47,8 +54,9 @@ resource "dbtcloud_databricks_credential" "my_spark_cred" { ### Optional +- `adapter_id` (Number) Databricks adapter ID for the credential (do not fill in when using global connections, only to be used for connections created with the legacy connection resource `dbtcloud_connection`) - `catalog` (String) The catalog where to create models (only for the databricks adapter) -- `target_name` (String) Target name +- `target_name` (String, Deprecated) Target name ### Read-Only diff --git a/examples/resources/dbtcloud_databricks_credential/resource.tf b/examples/resources/dbtcloud_databricks_credential/resource.tf index 2643ed3..dea5013 100644 --- a/examples/resources/dbtcloud_databricks_credential/resource.tf +++ b/examples/resources/dbtcloud_databricks_credential/resource.tf @@ -1,8 +1,17 @@ -# when using the Databricks adapter +# when using the Databricks adapter with a new `dbtcloud_global_connection` +# we don't provide an `adapter_id` +resource "dbtcloud_databricks_credential" "my_databricks_cred" { + project_id = dbtcloud_project.dbt_project.id + token = "abcdefgh" + schema = "my_schema" + adapter_type = "databricks" +} + +# when using the Databricks adapter with a legacy `dbtcloud_connection` +# we provide an `adapter_id` resource "dbtcloud_databricks_credential" "my_databricks_cred" { project_id = dbtcloud_project.dbt_project.id adapter_id = dbtcloud_connection.my_databricks_connection.adapter_id - target_name = "prod" token = "abcdefgh" schema = "my_schema" adapter_type = "databricks" @@ -12,7 +21,6 @@ resource "dbtcloud_databricks_credential" "my_databricks_cred" { resource "dbtcloud_databricks_credential" "my_spark_cred" { project_id = dbtcloud_project.dbt_project.id 
adapter_id = dbtcloud_connection.my_databricks_connection.adapter_id - target_name = "prod" token = "abcdefgh" schema = "my_schema" adapter_type = "spark" diff --git a/pkg/dbt_cloud/databricks_credential.go b/pkg/dbt_cloud/databricks_credential.go index aedab1c..9453582 100644 --- a/pkg/dbt_cloud/databricks_credential.go +++ b/pkg/dbt_cloud/databricks_credential.go @@ -34,10 +34,27 @@ type DatabricksCredential struct { Threads int `json:"threads"` Target_Name string `json:"target_name"` Adapter_Id int `json:"adapter_id"` + AdapterVersion string `json:"adapter_version,omitempty"` Credential_Details AdapterCredentialDetails `json:"credential_details"` UnencryptedCredentialDetails DatabricksUnencryptedCredentialDetails `json:"unencrypted_credential_details"` } +type DatabricksCredentialGlobConn struct { + ID *int `json:"id"` + AccountID int `json:"account_id"` + ProjectID int `json:"project_id"` + Type string `json:"type"` + State int `json:"state"` + Threads int `json:"threads"` + AdapterVersion string `json:"adapter_version"` + CredentialDetails AdapterCredentialDetails `json:"credential_details"` +} + +type DatabricksCredentialGLobConnPatch struct { + ID int `json:"id"` + CredentialDetails AdapterCredentialDetails `json:"credential_details"` +} + func (c *Client) GetDatabricksCredential( projectId int, credentialId int, @@ -71,7 +88,7 @@ func (c *Client) GetDatabricksCredential( return &credentialResponse.Data, nil } -func (c *Client) CreateDatabricksCredential( +func (c *Client) CreateDatabricksCredentialLegacy( projectId int, type_ string, targetName string, @@ -195,7 +212,69 @@ func (c *Client) CreateDatabricksCredential( return &databricksCredentialResponse.Data, nil } -func (c *Client) UpdateDatabricksCredential( +func (c *Client) CreateDatabricksCredential( + projectId int, + token string, + schema string, + targetName string, + catalog string, + +) (*DatabricksCredential, error) { + + credentialDetails, err := GenerateDatabricksCredentialDetails( + token, + 
schema, + targetName, + catalog, + ) + if err != nil { + return nil, err + } + + newDatabricksCredential := DatabricksCredentialGlobConn{ + AccountID: c.AccountID, + ProjectID: projectId, + Type: "adapter", + AdapterVersion: "databricks_v0", + State: STATE_ACTIVE, + Threads: NUM_THREADS_CREDENTIAL, + CredentialDetails: credentialDetails, + } + + newDatabricksCredentialData, err := json.Marshal(newDatabricksCredential) + if err != nil { + return nil, err + } + + req, err := http.NewRequest( + "POST", + fmt.Sprintf( + "%s/v3/accounts/%d/projects/%d/credentials/", + c.HostURL, + c.AccountID, + projectId, + ), + strings.NewReader(string(newDatabricksCredentialData)), + ) + if err != nil { + return nil, err + } + + body, err := c.doRequest(req) + if err != nil { + return nil, err + } + + databricksCredentialResponse := DatabricksCredentialResponse{} + err = json.Unmarshal(body, &databricksCredentialResponse) + if err != nil { + return nil, err + } + + return &databricksCredentialResponse.Data, nil +} + +func (c *Client) UpdateDatabricksCredentialLegacy( projectId int, credentialId int, databricksCredential DatabricksCredential, @@ -233,3 +312,165 @@ func (c *Client) UpdateDatabricksCredential( return &databricksCredentialResponse.Data, nil } + +func (c *Client) UpdateDatabricksCredentialGlobConn( + projectId int, + credentialId int, + databricksCredential DatabricksCredentialGLobConnPatch, +) (*DatabricksCredential, error) { + databricksCredentialData, err := json.Marshal(databricksCredential) + if err != nil { + return nil, err + } + + req, err := http.NewRequest( + "PATCH", + fmt.Sprintf( + "%s/v3/accounts/%d/projects/%d/credentials/%d/", + c.HostURL, + c.AccountID, + projectId, + credentialId, + ), + strings.NewReader(string(databricksCredentialData)), + ) + if err != nil { + return nil, err + } + + body, err := c.doRequest(req) + if err != nil { + return nil, err + } + + databricksCredentialResponse := DatabricksCredentialResponse{} + err = json.Unmarshal(body, 
&databricksCredentialResponse) + if err != nil { + return nil, err + } + + return &databricksCredentialResponse.Data, nil +} + +func GenerateDatabricksCredentialDetails( + token string, + schema string, + targetName string, + catalog string, + +) (AdapterCredentialDetails, error) { + // the default config is taken from the calls made to the API + // we just remove all the different values and set them to "" + defaultConfig := `{ + "fields": { + "auth_type": { + "metadata": { + "label": "Auth method", + "description": "", + "field_type": "select", + "encrypt": false, + "overrideable": false, + "is_searchable": false, + "options": [ + { + "label": "Token", + "value": "token" + }, + { + "label": "OAuth", + "value": "oauth" + } + ], + "validation": { + "required": true + } + }, + "value": "token" + }, + "token": { + "metadata": { + "label": "Token", + "description": "Personalized user token.", + "field_type": "text", + "encrypt": true, + "depends_on": { + "auth_type": [ + "token" + ] + }, + "overrideable": false, + "validation": { + "required": true + } + }, + "value": "" + }, + "schema": { + "metadata": { + "label": "Schema", + "description": "User schema.", + "field_type": "text", + "encrypt": false, + "overrideable": false, + "validation": { + "required": true + } + }, + "value": "" + }, + "target_name": { + "metadata": { + "label": "Target Name", + "description": "", + "field_type": "text", + "encrypt": false, + "overrideable": false, + "validation": { + "required": false + } + }, + "value": "" + }, + "catalog": { + "metadata": { + "label": "Catalog", + "description": "Catalog name if Unity Catalog is enabled in your Databricks workspace. 
Only available in dbt version 1.1 and later.", + "field_type": "text", + "encrypt": false, + "overrideable": false, + "validation": { + "required": false + } + }, + "value": "" + } + } + } +` + // we load the raw JSON to make it easier to update if the schema changes in the future + var databricksCredentialDetailsDefault AdapterCredentialDetails + err := json.Unmarshal([]byte(defaultConfig), &databricksCredentialDetailsDefault) + if err != nil { + return databricksCredentialDetailsDefault, err + } + + fieldMapping := map[string]interface{}{ + "token": token, + "schema": schema, + "target_name": targetName, + "catalog": catalog, + "auth_type": "token", + } + + databricksCredentialFields := map[string]AdapterCredentialField{} + for key, value := range databricksCredentialDetailsDefault.Fields { + value.Value = fieldMapping[key] + databricksCredentialFields[key] = value + } + + credentialDetails := AdapterCredentialDetails{ + Fields: databricksCredentialFields, + Field_Order: []string{}, + } + return credentialDetails, nil +} diff --git a/pkg/sdkv2/resources/databricks_credential.go b/pkg/sdkv2/resources/databricks_credential.go index af8d7f6..1df531a 100644 --- a/pkg/sdkv2/resources/databricks_credential.go +++ b/pkg/sdkv2/resources/databricks_credential.go @@ -3,6 +3,7 @@ package resources import ( "context" "fmt" + "strconv" "strings" "github.com/dbt-labs/terraform-provider-dbtcloud/pkg/dbt_cloud" @@ -19,6 +20,10 @@ var ( } ) +func isLegacyDatabricksConnection(d *schema.ResourceData) bool { + return d.Get("adapter_id").(int) != 0 +} + func ResourceDatabricksCredential() *schema.Resource { return &schema.Resource{ CreateContext: resourceDatabricksCredentialCreate, @@ -35,8 +40,9 @@ func ResourceDatabricksCredential() *schema.Resource { }, "adapter_id": { Type: schema.TypeInt, - Required: true, - Description: "Databricks adapter ID for the credential", + Optional: true, + ForceNew: true, + Description: "Databricks adapter ID for the credential (do not fill in when 
using global connections, only to be used for connections created with the legacy connection resource `dbtcloud_connection`)", }, "credential_id": { Type: schema.TypeInt, @@ -48,6 +54,7 @@ func ResourceDatabricksCredential() *schema.Resource { Optional: true, Default: "default", Description: "Target name", + Deprecated: "This field is deprecated at the environment level (it was never possible to set it in the UI) and will be removed in a future release. Please remove it and set the target name at the job level or leverage environment variables.", }, "token": { Type: schema.TypeString, @@ -69,6 +76,7 @@ func ResourceDatabricksCredential() *schema.Resource { "adapter_type": { Type: schema.TypeString, Required: true, + ForceNew: true, Description: "The type of the adapter (databricks or spark)", ValidateFunc: validation.StringInSlice(adapterTypes, false), }, @@ -84,6 +92,18 @@ func resourceDatabricksCredentialCreate( ctx context.Context, d *schema.ResourceData, m interface{}, +) diag.Diagnostics { + if isLegacyDatabricksConnection(d) { + return resourceDatabricksCredentialCreateLegacy(ctx, d, m) + } else { + return resourceDatabricksCredentialCreateGlobConn(ctx, d, m) + } +} + +func resourceDatabricksCredentialCreateLegacy( + ctx context.Context, + d *schema.ResourceData, + m interface{}, ) diag.Diagnostics { c := m.(*dbt_cloud.Client) @@ -98,7 +118,7 @@ func resourceDatabricksCredentialCreate( schema := d.Get("schema").(string) adapterType := d.Get("adapter_type").(string) - databricksCredential, err := c.CreateDatabricksCredential( + databricksCredential, err := c.CreateDatabricksCredentialLegacy( projectId, "adapter", targetName, @@ -126,6 +146,56 @@ func resourceDatabricksCredentialCreate( return diags } +func resourceDatabricksCredentialCreateGlobConn( + ctx context.Context, + d *schema.ResourceData, + m interface{}, +) diag.Diagnostics { + c := m.(*dbt_cloud.Client) + + var diags diag.Diagnostics + + projectId := d.Get("project_id").(int) + targetName := 
d.Get("target_name").(string) + token := d.Get("token").(string) + catalog := d.Get("catalog").(string) + schema := d.Get("schema").(string) + adapterType := d.Get("adapter_type").(string) + + // for now, just supporting databricks + if adapterType == "spark" { + diags = append(diags, diag.Diagnostic{ + Severity: diag.Error, + Summary: "Spark adapter is not supported currently for global connections credentials. Please raise a GitHub issue if you need it", + }) + return diags + } + + databricksCredential, err := c.CreateDatabricksCredential( + projectId, + token, + schema, + targetName, + catalog, + ) + if err != nil { + return diag.FromErr(err) + } + + d.SetId( + fmt.Sprintf( + "%d%s%d", + databricksCredential.Project_Id, + dbt_cloud.ID_DELIMITER, + *databricksCredential.ID, + ), + ) + + resourceDatabricksCredentialRead(ctx, d, m) + + return diags +} + func resourceDatabricksCredentialRead( ctx context.Context, d *schema.ResourceData, @@ -184,6 +254,18 @@ func resourceDatabricksCredentialUpdate( ctx context.Context, d *schema.ResourceData, m interface{}, +) diag.Diagnostics { + if isLegacyDatabricksConnection(d) { + return resourceDatabricksCredentialUpdateLegacy(ctx, d, m) + } else { + return resourceDatabricksCredentialUpdateGlobConn(ctx, d, m) + } +} + +func resourceDatabricksCredentialUpdateLegacy( + ctx context.Context, + d *schema.ResourceData, + m interface{}, ) diag.Diagnostics { c := m.(*dbt_cloud.Client) projectId, databricksCredentialId, err := helper.SplitIDToInts( @@ -289,7 +371,7 @@ func resourceDatabricksCredentialUpdate( databricksCredential.Credential_Details = credentialDetails - _, err = c.UpdateDatabricksCredential( + _, err = c.UpdateDatabricksCredentialLegacy( projectId, databricksCredentialId, *databricksCredential, @@ -302,10 +384,73 @@ func resourceDatabricksCredentialUpdate( return resourceDatabricksCredentialRead(ctx, d, m) } +func resourceDatabricksCredentialUpdateGlobConn( + ctx context.Context, + d *schema.ResourceData, + m 
interface{}, +) diag.Diagnostics { + c := m.(*dbt_cloud.Client) + projectId, databricksCredentialId, err := helper.SplitIDToInts( + d.Id(), + "dbtcloud_databricks_credential", + ) + if err != nil { + return diag.FromErr(err) + } + + if d.HasChange("token") || + d.HasChange("target_name") || + d.HasChange("catalog") || + d.HasChange("schema") { + + patchCredentialsDetails, err := dbt_cloud.GenerateDatabricksCredentialDetails( + d.Get("token").(string), + d.Get("schema").(string), + d.Get("target_name").(string), + d.Get("catalog").(string), + ) + + for key, _ := range patchCredentialsDetails.Fields { + if d.Get(key) == nil || !d.HasChange(key) { + delete(patchCredentialsDetails.Fields, key) + } + } + + databricksPatch := dbt_cloud.DatabricksCredentialGLobConnPatch{ + ID: databricksCredentialId, + CredentialDetails: patchCredentialsDetails, + } + + _, err = c.UpdateDatabricksCredentialGlobConn( + projectId, + databricksCredentialId, + databricksPatch, + ) + + if err != nil { + return diag.FromErr(err) + } + } + + return resourceDatabricksCredentialRead(ctx, d, m) +} + func resourceDatabricksCredentialDelete( ctx context.Context, d *schema.ResourceData, m interface{}, +) diag.Diagnostics { + if isLegacyDatabricksConnection(d) { + return resourceDatabricksCredentialDeleteLegacy(ctx, d, m) + } else { + return resourceDatabricksCredentialDeleteGlobConn(ctx, d, m) + } +} + +func resourceDatabricksCredentialDeleteLegacy( + ctx context.Context, + d *schema.ResourceData, + m interface{}, ) diag.Diagnostics { c := m.(*dbt_cloud.Client) @@ -363,7 +508,39 @@ func resourceDatabricksCredentialDelete( databricksCredential.Credential_Details = credentialDetails - _, err = c.UpdateDatabricksCredential(projectId, databricksCredentialId, *databricksCredential) + _, err = c.UpdateDatabricksCredentialLegacy( + projectId, + databricksCredentialId, + *databricksCredential, + ) + if err != nil { + return diag.FromErr(err) + } + + return diags +} + +func 
resourceDatabricksCredentialDeleteGlobConn( + ctx context.Context, + d *schema.ResourceData, + m interface{}, +) diag.Diagnostics { + c := m.(*dbt_cloud.Client) + + var diags diag.Diagnostics + + projectId, databricksCredentialId, err := helper.SplitIDToInts( + d.Id(), + "dbtcloud_databricks_credential", + ) + if err != nil { + return diag.FromErr(err) + } + + _, err = c.DeleteCredential( + strconv.Itoa(databricksCredentialId), + strconv.Itoa(projectId), + ) if err != nil { return diag.FromErr(err) } diff --git a/pkg/sdkv2/resources/databricks_credential_acceptance_test.go b/pkg/sdkv2/resources/databricks_credential_acceptance_test.go index 9995697..f669f80 100644 --- a/pkg/sdkv2/resources/databricks_credential_acceptance_test.go +++ b/pkg/sdkv2/resources/databricks_credential_acceptance_test.go @@ -13,11 +13,13 @@ import ( "github.com/hashicorp/terraform-plugin-testing/terraform" ) -func TestAccDbtCloudDatabricksCredentialResource(t *testing.T) { +func TestAccDbtCloudDatabricksCredentialResourceLegacy(t *testing.T) { projectName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) targetName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) + targetName2 := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) token := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) + token2 := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) resource.Test(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, @@ -25,7 +27,7 @@ func TestAccDbtCloudDatabricksCredentialResource(t *testing.T) { CheckDestroy: testAccCheckDbtCloudDatabricksCredentialDestroy, Steps: []resource.TestStep{ { - Config: testAccDbtCloudDatabricksCredentialResourceBasicConfig( + Config: testAccDbtCloudDatabricksCredentialResourceBasicConfigLegacy( projectName, targetName, token, @@ -43,6 +45,28 @@ func TestAccDbtCloudDatabricksCredentialResource(t *testing.T) { }, // RENAME // MODIFY + { 
+ Config: testAccDbtCloudDatabricksCredentialResourceBasicConfigLegacy( + projectName, + targetName2, + token2, + ), + Check: resource.ComposeTestCheckFunc( + testAccCheckDbtCloudDatabricksCredentialExists( + "dbtcloud_databricks_credential.test_credential", + ), + resource.TestCheckResourceAttr( + "dbtcloud_databricks_credential.test_credential", + "target_name", + targetName2, + ), + resource.TestCheckResourceAttr( + "dbtcloud_databricks_credential.test_credential", + "token", + token2, + ), + ), + }, // IMPORT { ResourceName: "dbtcloud_databricks_credential.test_credential", @@ -54,7 +78,7 @@ func TestAccDbtCloudDatabricksCredentialResource(t *testing.T) { }) } -func testAccDbtCloudDatabricksCredentialResourceBasicConfig( +func testAccDbtCloudDatabricksCredentialResourceBasicConfigLegacy( projectName, targetName, token string, ) string { return fmt.Sprintf(` @@ -70,12 +94,117 @@ resource "dbtcloud_connection" "databricks" { http_path = "/my/path" catalog = "moo" } + +resource "dbtcloud_databricks_credential" "test_credential" { + project_id = dbtcloud_project.test_project.id + adapter_id = dbtcloud_connection.databricks.adapter_id + target_name = "%s" + token = "%s" + schema = "my_schema" + adapter_type = "databricks" +} +`, projectName, targetName, token) +} + +func TestAccDbtCloudDatabricksCredentialResourceGlobConn(t *testing.T) { + + projectName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) + catalog := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) + token := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) + token2 := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + ProtoV6ProviderFactories: acctest_helper.TestAccProtoV6ProviderFactories, + CheckDestroy: testAccCheckDbtCloudDatabricksCredentialDestroy, + Steps: []resource.TestStep{ + { + Config: 
testAccDbtCloudDatabricksCredentialResourceBasicConfigGlobConn( + projectName, + catalog, + token, + ), + Check: resource.ComposeTestCheckFunc( + testAccCheckDbtCloudDatabricksCredentialExists( + "dbtcloud_databricks_credential.test_credential", + ), + resource.TestCheckResourceAttr( + "dbtcloud_databricks_credential.test_credential", + "catalog", + catalog, + ), + ), + }, + // RENAME + // MODIFY + { + Config: testAccDbtCloudDatabricksCredentialResourceBasicConfigGlobConn( + projectName, + "", + token2, + ), + Check: resource.ComposeTestCheckFunc( + testAccCheckDbtCloudDatabricksCredentialExists( + "dbtcloud_databricks_credential.test_credential", + ), + resource.TestCheckResourceAttr( + "dbtcloud_databricks_credential.test_credential", + "catalog", + "", + ), + resource.TestCheckResourceAttr( + "dbtcloud_databricks_credential.test_credential", + "token", + token2, + ), + ), + }, + // IMPORT + { + ResourceName: "dbtcloud_databricks_credential.test_credential", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"token", "adapter_type"}, + }, + }, + }) +} + +func testAccDbtCloudDatabricksCredentialResourceBasicConfigGlobConn( + projectName, targetName, token string, +) string { + return fmt.Sprintf(` +resource "dbtcloud_project" "test_project" { + name = "%s" +} + +resource "dbtcloud_global_connection" "databricks" { + name = "My Databricks connection" + databricks = { + host = "my-databricks-host.cloud.databricks.com" + http_path = "/sql/my/http/path" + catalog = "dbt_catalog" + client_id = "yourclientid" + client_secret = "yourclientsecret" + } +} + +resource "dbtcloud_environment" "prod_environment" { + dbt_version = "versionless" + name = "Prod" + project_id = dbtcloud_project.test_project.id + connection_id = dbtcloud_global_connection.databricks.id + type = "deployment" + credential_id = dbtcloud_databricks_credential.test_credential.credential_id + deployment_type = "production" +} + + resource "dbtcloud_databricks_credential" 
"test_credential" { project_id = dbtcloud_project.test_project.id - adapter_id = dbtcloud_connection.databricks.adapter_id - target_name = "%s" - token = "%s" - schema = "my_schema" + catalog = "%s" + token = "%s" + schema = "my_schema" adapter_type = "databricks" } `, projectName, targetName, token) From 27a93a5951657fb5d601a7e62ddd4cab8623e93b Mon Sep 17 00:00:00 2001 From: Benoit Perigaud <8754100+b-per@users.noreply.github.com> Date: Wed, 23 Oct 2024 11:56:25 +0200 Subject: [PATCH 4/4] Update changelog for new release --- CHANGELOG.md | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index bbf7b54..bb57750 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,7 +2,19 @@ All notable changes to this project will be documented in this file. -## [Unreleased](https://github.com/dbt-labs/terraform-provider-dbtcloud/compare/v0.3.18...HEAD) +## [Unreleased](https://github.com/dbt-labs/terraform-provider-dbtcloud/compare/v0.3.19...HEAD) + +# [0.3.19](https://github.com/dbt-labs/terraform-provider-dbtcloud/compare/v0.3.18...v0.3.19) + +### Fixes + +- Allow defining some `dbtcloud_databricks_credential` when using global connections which don't generate an `adapter_id` (seed docs for the resource for more details) + +### Changes + +- Add the ability to compare changes in a `dbtcloud_job` resource +- Add deprecation notice for `target_name` in `dbtcloud_databricks_credential` as those can't be set in the UI +- Make `versionless` the default version for environments, but can still be changed # [0.3.18](https://github.com/dbt-labs/terraform-provider-dbtcloud/compare/v0.3.17...v0.3.18)