Skip to content

Commit

Permalink
Fix DBX credentials with global connections
Browse files Browse the repository at this point in the history
  • Loading branch information
b-per committed Oct 23, 2024
1 parent f55a1a8 commit a722b8b
Show file tree
Hide file tree
Showing 5 changed files with 585 additions and 22 deletions.
18 changes: 13 additions & 5 deletions docs/resources/databricks_credential.md
Original file line number Diff line number Diff line change
Expand Up @@ -13,11 +13,20 @@ description: |-
## Example Usage

```terraform
# when using the Databricks adapter
# when using the Databricks adapter with a new `dbtcloud_global_connection`
# we don't provide an `adapter_id`
resource "dbtcloud_databricks_credential" "my_databricks_cred" {
project_id = dbtcloud_project.dbt_project.id
token = "abcdefgh"
schema = "my_schema"
adapter_type = "databricks"
}
# when using the Databricks adapter with a legacy `dbtcloud_connection`
# we provide an `adapter_id`
resource "dbtcloud_databricks_credential" "my_legacy_databricks_cred" {
project_id = dbtcloud_project.dbt_project.id
adapter_id = dbtcloud_connection.my_databricks_connection.adapter_id
target_name = "prod"
token = "abcdefgh"
schema = "my_schema"
adapter_type = "databricks"
Expand All @@ -27,7 +36,6 @@ resource "dbtcloud_databricks_credential" "my_databricks_cred" {
resource "dbtcloud_databricks_credential" "my_spark_cred" {
project_id = dbtcloud_project.dbt_project.id
adapter_id = dbtcloud_connection.my_databricks_connection.adapter_id
target_name = "prod"
token = "abcdefgh"
schema = "my_schema"
adapter_type = "spark"
Expand All @@ -39,16 +47,16 @@ resource "dbtcloud_databricks_credential" "my_spark_cred" {

### Required

- `adapter_id` (Number) Databricks adapter ID for the credential
- `adapter_type` (String) The type of the adapter (databricks or spark)
- `project_id` (Number) Project ID to create the Databricks credential in
- `schema` (String) The schema where to create models
- `token` (String, Sensitive) Token for Databricks user

### Optional

- `adapter_id` (Number) Databricks adapter ID for the credential (do not fill in when using global connections, only to be used for connections created with the legacy connection resource `dbtcloud_connection`)
- `catalog` (String) The catalog where to create models (only for the databricks adapter)
- `target_name` (String) Target name
- `target_name` (String, Deprecated) Target name

### Read-Only

Expand Down
14 changes: 11 additions & 3 deletions examples/resources/dbtcloud_databricks_credential/resource.tf
Original file line number Diff line number Diff line change
@@ -1,8 +1,17 @@
# when using the Databricks adapter
# when using the Databricks adapter with a new `dbtcloud_global_connection`
# we don't provide an `adapter_id`
resource "dbtcloud_databricks_credential" "my_databricks_cred" {
project_id = dbtcloud_project.dbt_project.id
token = "abcdefgh"
schema = "my_schema"
adapter_type = "databricks"
}

# when using the Databricks adapter with a legacy `dbtcloud_connection`
# we provide an `adapter_id`
resource "dbtcloud_databricks_credential" "my_legacy_databricks_cred" {
project_id = dbtcloud_project.dbt_project.id
adapter_id = dbtcloud_connection.my_databricks_connection.adapter_id
target_name = "prod"
token = "abcdefgh"
schema = "my_schema"
adapter_type = "databricks"
Expand All @@ -12,7 +21,6 @@ resource "dbtcloud_databricks_credential" "my_databricks_cred" {
resource "dbtcloud_databricks_credential" "my_spark_cred" {
project_id = dbtcloud_project.dbt_project.id
adapter_id = dbtcloud_connection.my_databricks_connection.adapter_id
target_name = "prod"
token = "abcdefgh"
schema = "my_schema"
adapter_type = "spark"
Expand Down
245 changes: 243 additions & 2 deletions pkg/dbt_cloud/databricks_credential.go
Original file line number Diff line number Diff line change
Expand Up @@ -34,10 +34,27 @@ type DatabricksCredential struct {
Threads int `json:"threads"`
Target_Name string `json:"target_name"`
Adapter_Id int `json:"adapter_id"`
AdapterVersion string `json:"adapter_version,omitempty"`
Credential_Details AdapterCredentialDetails `json:"credential_details"`
UnencryptedCredentialDetails DatabricksUnencryptedCredentialDetails `json:"unencrypted_credential_details"`
}

// DatabricksCredentialGlobConn is the POST payload used to create a
// Databricks credential for a project that uses a global connection
// (the newer `dbtcloud_global_connection`), as opposed to the legacy
// adapter/connection-based DatabricksCredential payload.
type DatabricksCredentialGlobConn struct {
	// ID is a pointer so a nil ID is serialized as `"id": null` on create.
	ID                *int                     `json:"id"`
	AccountID         int                      `json:"account_id"`
	ProjectID         int                      `json:"project_id"`
	// Type is always "adapter" for this payload (see CreateDatabricksCredential).
	Type              string                   `json:"type"`
	State             int                      `json:"state"`
	Threads           int                      `json:"threads"`
	// AdapterVersion is e.g. "databricks_v0" (set by CreateDatabricksCredential).
	AdapterVersion    string                   `json:"adapter_version"`
	CredentialDetails AdapterCredentialDetails `json:"credential_details"`
}

// DatabricksCredentialGLobConnPatch is the PATCH payload for updating the
// credential details of an existing global-connection credential
// (see UpdateDatabricksCredentialGlobConn).
//
// NOTE(review): "GLob" looks like a typo for "Glob", but the identifier is
// exported, so renaming it would break external callers.
type DatabricksCredentialGLobConnPatch struct {
	ID                int                      `json:"id"`
	CredentialDetails AdapterCredentialDetails `json:"credential_details"`
}

func (c *Client) GetDatabricksCredential(
projectId int,
credentialId int,
Expand Down Expand Up @@ -71,7 +88,7 @@ func (c *Client) GetDatabricksCredential(
return &credentialResponse.Data, nil
}

func (c *Client) CreateDatabricksCredential(
func (c *Client) CreateDatabricksCredentialLegacy(
projectId int,
type_ string,
targetName string,
Expand Down Expand Up @@ -195,7 +212,69 @@ func (c *Client) CreateDatabricksCredential(
return &databricksCredentialResponse.Data, nil
}

func (c *Client) UpdateDatabricksCredential(
// CreateDatabricksCredential creates a Databricks credential for a project
// that uses a global connection (no adapter_id). The credential is posted as
// an "adapter"-type credential with adapter version "databricks_v0".
//
// token, schema, targetName and catalog are folded into the credential
// details payload via GenerateDatabricksCredentialDetails.
func (c *Client) CreateDatabricksCredential(
	projectId int,
	token string,
	schema string,
	targetName string,
	catalog string,
) (*DatabricksCredential, error) {
	details, err := GenerateDatabricksCredentialDetails(token, schema, targetName, catalog)
	if err != nil {
		return nil, err
	}

	payload := DatabricksCredentialGlobConn{
		AccountID:         c.AccountID,
		ProjectID:         projectId,
		Type:              "adapter",
		AdapterVersion:    "databricks_v0",
		State:             STATE_ACTIVE,
		Threads:           NUM_THREADS_CREDENTIAL,
		CredentialDetails: details,
	}

	payloadJSON, err := json.Marshal(payload)
	if err != nil {
		return nil, err
	}

	url := fmt.Sprintf(
		"%s/v3/accounts/%d/projects/%d/credentials/",
		c.HostURL,
		c.AccountID,
		projectId,
	)
	req, err := http.NewRequest("POST", url, strings.NewReader(string(payloadJSON)))
	if err != nil {
		return nil, err
	}

	body, err := c.doRequest(req)
	if err != nil {
		return nil, err
	}

	response := DatabricksCredentialResponse{}
	if err := json.Unmarshal(body, &response); err != nil {
		return nil, err
	}

	return &response.Data, nil
}

func (c *Client) UpdateDatabricksCredentialLegacy(
projectId int,
credentialId int,
databricksCredential DatabricksCredential,
Expand Down Expand Up @@ -233,3 +312,165 @@ func (c *Client) UpdateDatabricksCredential(

return &databricksCredentialResponse.Data, nil
}

// UpdateDatabricksCredentialGlobConn PATCHes the credential details of an
// existing credential attached to a global connection and returns the
// updated credential as reported by the API.
func (c *Client) UpdateDatabricksCredentialGlobConn(
	projectId int,
	credentialId int,
	databricksCredential DatabricksCredentialGLobConnPatch,
) (*DatabricksCredential, error) {
	payload, err := json.Marshal(databricksCredential)
	if err != nil {
		return nil, err
	}

	url := fmt.Sprintf(
		"%s/v3/accounts/%d/projects/%d/credentials/%d/",
		c.HostURL,
		c.AccountID,
		projectId,
		credentialId,
	)
	req, err := http.NewRequest("PATCH", url, strings.NewReader(string(payload)))
	if err != nil {
		return nil, err
	}

	body, err := c.doRequest(req)
	if err != nil {
		return nil, err
	}

	response := DatabricksCredentialResponse{}
	if err := json.Unmarshal(body, &response); err != nil {
		return nil, err
	}

	return &response.Data, nil
}

// GenerateDatabricksCredentialDetails builds the credential_details payload
// for a Databricks credential on a global connection. It starts from the
// default field schema (captured from real API calls) and fills in the
// user-provided values for token, schema, target_name and catalog; auth_type
// is always set to "token".
//
// Returns an error only if the embedded default config fails to unmarshal.
func GenerateDatabricksCredentialDetails(
	token string,
	schema string,
	targetName string,
	catalog string,
) (AdapterCredentialDetails, error) {
	// the default config is taken from the calls made to the API
	// we just remove all the different values and set them to ""
	defaultConfig := `{
		"fields": {
			"auth_type": {
				"metadata": {
					"label": "Auth method",
					"description": "",
					"field_type": "select",
					"encrypt": false,
					"overrideable": false,
					"is_searchable": false,
					"options": [
						{
							"label": "Token",
							"value": "token"
						},
						{
							"label": "OAuth",
							"value": "oauth"
						}
					],
					"validation": {
						"required": true
					}
				},
				"value": "token"
			},
			"token": {
				"metadata": {
					"label": "Token",
					"description": "Personalized user token.",
					"field_type": "text",
					"encrypt": true,
					"depends_on": {
						"auth_type": [
							"token"
						]
					},
					"overrideable": false,
					"validation": {
						"required": true
					}
				},
				"value": ""
			},
			"schema": {
				"metadata": {
					"label": "Schema",
					"description": "User schema.",
					"field_type": "text",
					"encrypt": false,
					"overrideable": false,
					"validation": {
						"required": true
					}
				},
				"value": ""
			},
			"target_name": {
				"metadata": {
					"label": "Target Name",
					"description": "",
					"field_type": "text",
					"encrypt": false,
					"overrideable": false,
					"validation": {
						"required": false
					}
				},
				"value": ""
			},
			"catalog": {
				"metadata": {
					"label": "Catalog",
					"description": "Catalog name if Unity Catalog is enabled in your Databricks workspace. Only available in dbt version 1.1 and later.",
					"field_type": "text",
					"encrypt": false,
					"overrideable": false,
					"validation": {
						"required": false
					}
				},
				"value": ""
			}
		}
	}
	`
	// we load the raw JSON to make it easier to update if the schema changes in the future
	var databricksCredentialDetailsDefault AdapterCredentialDetails
	err := json.Unmarshal([]byte(defaultConfig), &databricksCredentialDetailsDefault)
	if err != nil {
		return databricksCredentialDetailsDefault, err
	}

	fieldMapping := map[string]interface{}{
		"token":       token,
		"schema":      schema,
		"target_name": targetName,
		"catalog":     catalog,
		"auth_type":   "token",
	}

	databricksCredentialFields := map[string]AdapterCredentialField{}
	for key, value := range databricksCredentialDetailsDefault.Fields {
		// only override fields we know about; if the default config gains a
		// new field in the future, keep its default value instead of
		// silently clobbering it to nil (which would marshal as null)
		if mapped, ok := fieldMapping[key]; ok {
			value.Value = mapped
		}
		databricksCredentialFields[key] = value
	}

	credentialDetails := AdapterCredentialDetails{
		Fields:      databricksCredentialFields,
		Field_Order: []string{},
	}
	return credentialDetails, nil
}
Loading

0 comments on commit a722b8b

Please sign in to comment.