diff --git a/.github/workflows/testacc.yml b/.github/workflows/testacc.yml
index 28037f87..e501eb94 100644
--- a/.github/workflows/testacc.yml
+++ b/.github/workflows/testacc.yml
@@ -83,13 +83,15 @@ jobs:
HYBRID_ORGANIZATION_ID: clx44v7op01nf01m5iohqjkk6
HOSTED_ORGANIZATION_API_TOKEN: ${{ secrets.DEV_HOSTED_ORGANIZATION_API_TOKEN }}
HOSTED_ORGANIZATION_ID: clx42kkcm01fo01o06agtmshg
+ HOSTED_SCIM_ORGANIZATION_API_TOKEN: ${{ secrets.DEV_HOSTED_SCIM_ORGANIZATION_API_TOKEN }}
+ HOSTED_SCIM_ORGANIZATION_ID: clz3bcmd3003m01qemptnfenp
HYBRID_CLUSTER_ID: clxkqfzvm001d01ncr9rs80si
HYBRID_DRY_RUN_CLUSTER_ID: clxko4djp008601njcuoxt4z5
HYBRID_NODE_POOL_ID: clxkqfzvm001c01nc1eosyxzg
ASTRO_API_HOST: https://api.astronomer-dev.io
SKIP_CLUSTER_RESOURCE_TESTS: ${{ env.SKIP_CLUSTER_RESOURCE_TESTS }}
HOSTED_TEAM_ID: clx44rvzr01nc01o06pze6qb7
- HOSTED_USER_ID: clhpichn8002m01mqa4ocs7g6
+ HOSTED_USER_ID: clz3a4ymt004x01on8w5ydq8j
HOSTED_DEPLOYMENT_ID: clyn6kxud003x01mtxmccegnh
HOSTED_WORKSPACE_ID: clx42sxw501gl01o0gjenthnh
HOSTED_API_TOKEN_ID: clxm4836f00ql01me3nigmcr6
@@ -132,12 +134,14 @@ jobs:
HYBRID_ORGANIZATION_ID: clx46ca4y061z01jleyku7sr6
HOSTED_ORGANIZATION_API_TOKEN: ${{ secrets.STAGE_HOSTED_ORGANIZATION_API_TOKEN }}
HOSTED_ORGANIZATION_ID: clx46acvv060e01ilddqlbsmc
+ HOSTED_SCIM_ORGANIZATION_API_TOKEN: ${{ secrets.STAGE_HOSTED_SCIM_ORGANIZATION_API_TOKEN }}
+ HOSTED_SCIM_ORGANIZATION_ID: clz3blqb500lh01mtkwu9zk5z
HYBRID_CLUSTER_ID: clxm3xg9e05bl01ixsrhxje4e
HYBRID_DRY_RUN_CLUSTER_ID: clxm3y54805bs01ix5owqhfff
HYBRID_NODE_POOL_ID: clxm3xg9e05bk01ixrqk52cob
ASTRO_API_HOST: https://api.astronomer-stage.io
HOSTED_TEAM_ID: clx486hno068301il306nuhsm
- HOSTED_USER_ID: cljftnljr00i001nl6bnngxdb
+ HOSTED_USER_ID: clz3a95hw00j301jj5jfmcgwd
HOSTED_DEPLOYMENT_ID: cly6exz4a00zd01k18t5bo1vf
HOSTED_WORKSPACE_ID: clx480rvx068u01j9mp7t7fqh
HOSTED_API_TOKEN_ID: clxm46ged05b301neuucdqwox
@@ -180,12 +184,14 @@ jobs:
HYBRID_ORGANIZATION_ID: clx44v7op01nf01m5iohqjkk6
HOSTED_ORGANIZATION_API_TOKEN: ${{ secrets.DEV_HOSTED_ORGANIZATION_API_TOKEN }}
HOSTED_ORGANIZATION_ID: clx42kkcm01fo01o06agtmshg
+ HOSTED_SCIM_ORGANIZATION_API_TOKEN: ${{ secrets.DEV_HOSTED_SCIM_ORGANIZATION_API_TOKEN }}
+ HOSTED_SCIM_ORGANIZATION_ID: clz3bcmd3003m01qemptnfenp
HYBRID_CLUSTER_ID: clxkqfzvm001d01ncr9rs80si
HYBRID_DRY_RUN_CLUSTER_ID: clxko4djp008601njcuoxt4z5
HYBRID_NODE_POOL_ID: clxkqfzvm001c01nc1eosyxzg
ASTRO_API_HOST: https://api.astronomer-dev.io
HOSTED_TEAM_ID: clx44rvzr01nc01o06pze6qb7
- HOSTED_USER_ID: clhpichn8002m01mqa4ocs7g6
+ HOSTED_USER_ID: clz3a4ymt004x01on8w5ydq8j
HOSTED_DEPLOYMENT_ID: clyn6kxud003x01mtxmccegnh
HOSTED_WORKSPACE_ID: clx42sxw501gl01o0gjenthnh
HOSTED_API_TOKEN_ID: clxm4836f00ql01me3nigmcr6
diff --git a/docs/data-sources/team.md b/docs/data-sources/team.md
index 47f4c9a0..f8a8d12f 100644
--- a/docs/data-sources/team.md
+++ b/docs/data-sources/team.md
@@ -23,21 +23,21 @@ data "astro_team" "example" {
### Required
-- `id` (String) Team identifier
+- `id` (String) Team ID
### Read-Only
- `created_at` (String) Team creation timestamp
- `created_by` (Attributes) Team creator (see [below for nested schema](#nestedatt--created_by))
-- `deployment_roles` (Attributes Set) The roles assigned to the deployments (see [below for nested schema](#nestedatt--deployment_roles))
+- `deployment_roles` (Attributes Set) The roles assigned to the Deployments (see [below for nested schema](#nestedatt--deployment_roles))
- `description` (String) Team description
-- `is_idp_managed` (Boolean) Whether the team is managed by an identity provider
+- `is_idp_managed` (Boolean) Whether the Team is managed by an identity provider
- `name` (String) Team name
-- `organization_role` (String) The role assigned to the organization
-- `roles_count` (Number) Number of roles assigned to the team
+- `organization_role` (String) The role assigned to the Organization
+- `roles_count` (Number) Number of roles assigned to the Team
- `updated_at` (String) Team last updated timestamp
- `updated_by` (Attributes) Team updater (see [below for nested schema](#nestedatt--updated_by))
-- `workspace_roles` (Attributes Set) The roles assigned to the workspaces (see [below for nested schema](#nestedatt--workspace_roles))
+- `workspace_roles` (Attributes Set) The roles assigned to the Workspaces (see [below for nested schema](#nestedatt--workspace_roles))
### Nested Schema for `created_by`
diff --git a/docs/data-sources/teams.md b/docs/data-sources/teams.md
index ed22e46f..a6d13101 100644
--- a/docs/data-sources/teams.md
+++ b/docs/data-sources/teams.md
@@ -36,21 +36,21 @@ data "astro_teams" "example_teams_filter_by_names" {
Required:
-- `id` (String) Team identifier
+- `id` (String) Team ID
Read-Only:
- `created_at` (String) Team creation timestamp
- `created_by` (Attributes) Team creator (see [below for nested schema](#nestedatt--teams--created_by))
-- `deployment_roles` (Attributes Set) The roles assigned to the deployments (see [below for nested schema](#nestedatt--teams--deployment_roles))
+- `deployment_roles` (Attributes Set) The roles assigned to the Deployments (see [below for nested schema](#nestedatt--teams--deployment_roles))
- `description` (String) Team description
-- `is_idp_managed` (Boolean) Whether the team is managed by an identity provider
+- `is_idp_managed` (Boolean) Whether the Team is managed by an identity provider
- `name` (String) Team name
-- `organization_role` (String) The role assigned to the organization
-- `roles_count` (Number) Number of roles assigned to the team
+- `organization_role` (String) The role assigned to the Organization
+- `roles_count` (Number) Number of roles assigned to the Team
- `updated_at` (String) Team last updated timestamp
- `updated_by` (Attributes) Team updater (see [below for nested schema](#nestedatt--teams--updated_by))
-- `workspace_roles` (Attributes Set) The roles assigned to the workspaces (see [below for nested schema](#nestedatt--teams--workspace_roles))
+- `workspace_roles` (Attributes Set) The roles assigned to the Workspaces (see [below for nested schema](#nestedatt--teams--workspace_roles))
### Nested Schema for `teams.created_by`
diff --git a/docs/resources/team.md b/docs/resources/team.md
new file mode 100644
index 00000000..63156cdf
--- /dev/null
+++ b/docs/resources/team.md
@@ -0,0 +1,103 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "astro_team Resource - astro"
+subcategory: ""
+description: |-
+ Team resource
+---
+
+# astro_team (Resource)
+
+Team resource
+
+## Example Usage
+
+```terraform
+resource "astro_team" "example" {
+ name = "team"
+ description = "team-description"
+ member_ids = ["clhpichn8002m01mqa4ocs7g6"]
+ organization_role = "ORGANIZATION_OWNER"
+ workspace_roles = [{
+ workspace_id = "clx42sxw501gl01o0gjenthnh"
+ role = "WORKSPACE_OWNER"
+ }]
+ deployment_roles = [{
+ deployment_id = "clyn6kxud003x01mtxmccegnh"
+ role = "DEPLOYMENT_ADMIN"
+ }]
+}
+
+resource "astro_team" "example_with_no_optional_fields" {
+ name = "team"
+ organization_role = "ORGANIZATION_OWNER"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `name` (String) Team name
+- `organization_role` (String) The role to assign to the Organization
+
+### Optional
+
+- `deployment_roles` (Attributes Set) The roles to assign to the Deployments (see [below for nested schema](#nestedatt--deployment_roles))
+- `description` (String) Team description
+- `member_ids` (Set of String) The IDs of the users to add to the Team
+- `workspace_roles` (Attributes Set) The roles to assign to the Workspaces (see [below for nested schema](#nestedatt--workspace_roles))
+
+### Read-Only
+
+- `created_at` (String) Team creation timestamp
+- `created_by` (Attributes) Team creator (see [below for nested schema](#nestedatt--created_by))
+- `id` (String) Team ID
+- `is_idp_managed` (Boolean) Whether the Team is managed by an identity provider
+- `roles_count` (Number) Number of roles assigned to the Team
+- `updated_at` (String) Team last updated timestamp
+- `updated_by` (Attributes) Team updater (see [below for nested schema](#nestedatt--updated_by))
+
+
+### Nested Schema for `deployment_roles`
+
+Required:
+
+- `deployment_id` (String) The ID of the Deployment to assign the role to
+- `role` (String) The role to assign to the Deployment
+
+
+
+### Nested Schema for `workspace_roles`
+
+Required:
+
+- `role` (String) The role to assign to the Workspace
+- `workspace_id` (String) The ID of the Workspace to assign the role to
+
+
+
+### Nested Schema for `created_by`
+
+Read-Only:
+
+- `api_token_name` (String)
+- `avatar_url` (String)
+- `full_name` (String)
+- `id` (String)
+- `subject_type` (String)
+- `username` (String)
+
+
+
+### Nested Schema for `updated_by`
+
+Read-Only:
+
+- `api_token_name` (String)
+- `avatar_url` (String)
+- `full_name` (String)
+- `id` (String)
+- `subject_type` (String)
+- `username` (String)
diff --git a/examples/resources/astro_team/resource.tf b/examples/resources/astro_team/resource.tf
new file mode 100644
index 00000000..db34dfd3
--- /dev/null
+++ b/examples/resources/astro_team/resource.tf
@@ -0,0 +1,20 @@
+resource "astro_team" "example" {
+ name = "team"
+ description = "team-description"
+ member_ids = ["clhpichn8002m01mqa4ocs7g6"]
+ organization_role = "ORGANIZATION_OWNER"
+ workspace_roles = [{
+ workspace_id = "clx42sxw501gl01o0gjenthnh"
+ role = "WORKSPACE_OWNER"
+ }]
+ deployment_roles = [{
+ deployment_id = "clyn6kxud003x01mtxmccegnh"
+ role = "DEPLOYMENT_ADMIN"
+ }]
+}
+
+resource "astro_team" "example_with_no_optional_fields" {
+ name = "team"
+ organization_role = "ORGANIZATION_OWNER"
+}
+
diff --git a/internal/provider/common/role.go b/internal/provider/common/role.go
new file mode 100644
index 00000000..79b6cd09
--- /dev/null
+++ b/internal/provider/common/role.go
@@ -0,0 +1,174 @@
+package common
+
+import (
+ "context"
+ "fmt"
+ "strings"
+
+ "github.com/astronomer/terraform-provider-astro/internal/clients"
+ "github.com/astronomer/terraform-provider-astro/internal/clients/platform"
+ "github.com/hashicorp/terraform-plugin-log/tflog"
+
+ "github.com/astronomer/terraform-provider-astro/internal/clients/iam"
+ "github.com/astronomer/terraform-provider-astro/internal/provider/models"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/samber/lo"
+)
+
+// RequestWorkspaceRoles converts a Terraform set to a list of iam.WorkspaceRole to be used in create and update requests
+func RequestWorkspaceRoles(ctx context.Context, workspaceRolesObjSet types.Set) ([]iam.WorkspaceRole, diag.Diagnostics) {
+ if len(workspaceRolesObjSet.Elements()) == 0 {
+ return []iam.WorkspaceRole{}, nil
+ }
+
+ var roles []models.WorkspaceRole
+ diags := workspaceRolesObjSet.ElementsAs(ctx, &roles, false)
+ if diags.HasError() {
+ return nil, diags
+ }
+ workspaceRoles := lo.Map(roles, func(role models.WorkspaceRole, _ int) iam.WorkspaceRole {
+ return iam.WorkspaceRole{
+ Role: iam.WorkspaceRoleRole(role.Role.ValueString()),
+ WorkspaceId: role.WorkspaceId.ValueString(),
+ }
+ })
+ return workspaceRoles, nil
+}
+
+// RequestDeploymentRoles converts a Terraform set to a list of iam.DeploymentRole to be used in create and update requests
+func RequestDeploymentRoles(ctx context.Context, deploymentRolesObjSet types.Set) ([]iam.DeploymentRole, diag.Diagnostics) {
+ if len(deploymentRolesObjSet.Elements()) == 0 {
+ return []iam.DeploymentRole{}, nil
+ }
+
+ var roles []models.DeploymentRole
+ diags := deploymentRolesObjSet.ElementsAs(ctx, &roles, false)
+ if diags.HasError() {
+ return nil, diags
+ }
+ deploymentRoles := lo.Map(roles, func(role models.DeploymentRole, _ int) iam.DeploymentRole {
+ return iam.DeploymentRole{
+ Role: role.Role.ValueString(),
+ DeploymentId: role.DeploymentId.ValueString(),
+ }
+ })
+ return deploymentRoles, nil
+}
+
+// ValidateRoleMatchesEntityType checks if the role is valid for the entityType
+func ValidateRoleMatchesEntityType(role string, scopeType string) bool {
+ if role == "" || scopeType == "" {
+ return false
+ }
+
+ organizationRoles := []string{string(iam.ORGANIZATIONBILLINGADMIN), string(iam.ORGANIZATIONMEMBER), string(iam.ORGANIZATIONOWNER)}
+ workspaceRoles := []string{string(iam.WORKSPACEACCESSOR), string(iam.WORKSPACEAUTHOR), string(iam.WORKSPACEMEMBER), string(iam.WORKSPACEOWNER), string(iam.WORKSPACEOPERATOR)}
+ deploymentRoles := []string{"DEPLOYMENT_ADMIN"}
+ var nonEntityRoles []string
+
+ scopeType = strings.ToLower(scopeType)
+ switch scopeType {
+ case "organization":
+ nonEntityRoles = append(workspaceRoles, deploymentRoles...)
+ case "workspace":
+ nonEntityRoles = append(organizationRoles, deploymentRoles...)
+ case "deployment":
+ nonEntityRoles = append(organizationRoles, workspaceRoles...)
+ }
+
+ return !lo.Contains(nonEntityRoles, role)
+}
+
+type ValidateWorkspaceDeploymentRolesInput struct {
+ PlatformClient *platform.ClientWithResponses
+ OrganizationId string
+ DeploymentRoles []iam.DeploymentRole
+ WorkspaceRoles []iam.WorkspaceRole
+}
+
+// ValidateWorkspaceDeploymentRoles checks if deployment roles have corresponding workspace roles
+func ValidateWorkspaceDeploymentRoles(ctx context.Context, input ValidateWorkspaceDeploymentRolesInput) diag.Diagnostics {
+ // return nil if there are no deployment roles
+ if len(input.DeploymentRoles) == 0 {
+ return nil
+ }
+
+ // get list of deployment ids
+ deploymentIds := lo.Map(input.DeploymentRoles, func(role iam.DeploymentRole, _ int) string {
+ return role.DeploymentId
+ })
+
+ // get list of deployments
+ listDeployments, err := input.PlatformClient.ListDeploymentsWithResponse(ctx, input.OrganizationId, &platform.ListDeploymentsParams{
+ DeploymentIds: &deploymentIds,
+ })
+ if err != nil {
+ tflog.Error(ctx, "failed to mutate Team roles", map[string]interface{}{"error": err})
+ return diag.Diagnostics{diag.NewErrorDiagnostic(
+ "Client Error",
+ fmt.Sprintf("Unable to mutate Team roles and list deployments, got error: %s", err),
+ ),
+ }
+ }
+ _, diagnostic := clients.NormalizeAPIError(ctx, listDeployments.HTTPResponse, listDeployments.Body)
+ if diagnostic != nil {
+ return diag.Diagnostics{diagnostic}
+ }
+
+ // get list of workspace ids from deployments
+ deploymentWorkspaceIds := lo.Map(listDeployments.JSON200.Deployments, func(deployment platform.Deployment, _ int) string {
+ return deployment.WorkspaceId
+ })
+
+ // get list of workspaceIds
+ workspaceIds := lo.Map(input.WorkspaceRoles, func(role iam.WorkspaceRole, _ int) string {
+ return role.WorkspaceId
+ })
+
+ // check if deploymentWorkspaceIds are in workspaceIds
+ workspaceIds = lo.Intersect(lo.Uniq(workspaceIds), lo.Uniq(deploymentWorkspaceIds))
+ if len(workspaceIds) != len(deploymentWorkspaceIds) {
+ tflog.Error(ctx, "failed to mutate Team roles", map[string]interface{}{"error": err})
+ return diag.Diagnostics{diag.NewErrorDiagnostic(
+ "Unable to mutate Team roles, not every deployment role has a corresponding workspace role",
+ "Please ensure that every deployment role has a corresponding workspace role",
+ ),
+ }
+ }
+ return nil
+}
+
+// GetDuplicateWorkspaceIds checks if there are duplicate workspace ids in the workspace roles
+func GetDuplicateWorkspaceIds(workspaceRoles []iam.WorkspaceRole) []string {
+ workspaceIdCount := make(map[string]int)
+ for _, role := range workspaceRoles {
+ workspaceIdCount[role.WorkspaceId]++
+ }
+
+ var duplicates []string
+ for id, count := range workspaceIdCount {
+ if count > 1 {
+ duplicates = append(duplicates, id)
+ }
+ }
+
+ return duplicates
+}
+
+// GetDuplicateDeploymentIds checks if there are duplicate deployment ids in the deployment roles
+func GetDuplicateDeploymentIds(deploymentRoles []iam.DeploymentRole) []string {
+ deploymentIdCount := make(map[string]int)
+ for _, role := range deploymentRoles {
+ deploymentIdCount[role.DeploymentId]++
+ }
+
+ var duplicates []string
+ for id, count := range deploymentIdCount {
+ if count > 1 {
+ duplicates = append(duplicates, id)
+ }
+ }
+
+ return duplicates
+}
diff --git a/internal/provider/datasources/data_source_api_token_test.go b/internal/provider/datasources/data_source_api_token_test.go
index 1aacca54..d10c3ea5 100644
--- a/internal/provider/datasources/data_source_api_token_test.go
+++ b/internal/provider/datasources/data_source_api_token_test.go
@@ -21,7 +21,7 @@ func TestAcc_DataSource_ApiToken(t *testing.T) {
ProtoV6ProviderFactories: astronomerprovider.TestAccProtoV6ProviderFactories,
Steps: []resource.TestStep{
{
- Config: astronomerprovider.ProviderConfig(t, true) + apiToken(apiTokenId, tfVarName),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + apiToken(apiTokenId, tfVarName),
Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttrSet(resourceVar, "id"),
resource.TestCheckResourceAttrSet(resourceVar, "name"),
diff --git a/internal/provider/datasources/data_source_api_tokens_test.go b/internal/provider/datasources/data_source_api_tokens_test.go
index df75cb6c..be9cdfe1 100644
--- a/internal/provider/datasources/data_source_api_tokens_test.go
+++ b/internal/provider/datasources/data_source_api_tokens_test.go
@@ -5,6 +5,8 @@ import (
"os"
"testing"
+ "github.com/astronomer/terraform-provider-astro/internal/provider/common"
+
"github.com/astronomer/terraform-provider-astro/internal/clients/iam"
astronomerprovider "github.com/astronomer/terraform-provider-astro/internal/provider"
@@ -32,7 +34,7 @@ func TestAcc_DataSourceApiTokens(t *testing.T) {
ProtoV6ProviderFactories: astronomerprovider.TestAccProtoV6ProviderFactories,
Steps: []resource.TestStep{
{
- Config: astronomerprovider.ProviderConfig(t, true) + apiTokens(tfVarName),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + apiTokens(tfVarName),
Check: resource.ComposeTestCheckFunc(
checkApiTokens(tfVarName, checkApiTokensInput{
workspaceId: "",
@@ -42,7 +44,7 @@ func TestAcc_DataSourceApiTokens(t *testing.T) {
),
},
{
- Config: astronomerprovider.ProviderConfig(t, true) + apiTokensFilterWorkspaceId(tfVarName, tfWorkspaceId),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + apiTokensFilterWorkspaceId(tfVarName, tfWorkspaceId),
Check: resource.ComposeTestCheckFunc(
checkApiTokens(tfVarName, checkApiTokensInput{
workspaceId: tfWorkspaceId,
@@ -52,7 +54,7 @@ func TestAcc_DataSourceApiTokens(t *testing.T) {
),
},
{
- Config: astronomerprovider.ProviderConfig(t, true) + apiTokensFilterDeploymentId(tfVarName, tfDeploymentId),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + apiTokensFilterDeploymentId(tfVarName, tfDeploymentId),
Check: resource.ComposeTestCheckFunc(
checkApiTokens(tfVarName, checkApiTokensInput{
workspaceId: "",
@@ -62,7 +64,7 @@ func TestAcc_DataSourceApiTokens(t *testing.T) {
),
},
{
- Config: astronomerprovider.ProviderConfig(t, true) + apiTokensFilterOrgOnly(tfVarName),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + apiTokensFilterOrgOnly(tfVarName),
Check: resource.ComposeTestCheckFunc(
checkApiTokens(tfVarName, checkApiTokensInput{
workspaceId: "",
@@ -162,7 +164,7 @@ func checkApiTokens(tfVarName string, input checkApiTokensInput) resource.TestCh
if entityId != input.workspaceId {
return fmt.Errorf("expected 'entity_id' to be set to workspace_id")
}
- if utils.ValidateRoleMatchesEntityType(role, "workspace") {
+ if !common.ValidateRoleMatchesEntityType(role, "workspace") {
return fmt.Errorf("expected 'role' to be set as a workspace role")
}
}
@@ -183,7 +185,7 @@ func checkApiTokens(tfVarName string, input checkApiTokensInput) resource.TestCh
if entityId != input.organizationId {
return fmt.Errorf("expected 'entity_id' to be set to organization_id")
}
- if utils.ValidateRoleMatchesEntityType(role, "organization") {
+ if !common.ValidateRoleMatchesEntityType(role, "organization") {
return fmt.Errorf("expected 'role' to be set as an organization role")
}
}
diff --git a/internal/provider/datasources/data_source_cluster_options_test.go b/internal/provider/datasources/data_source_cluster_options_test.go
index bc12feca..b52fe8e2 100644
--- a/internal/provider/datasources/data_source_cluster_options_test.go
+++ b/internal/provider/datasources/data_source_cluster_options_test.go
@@ -20,17 +20,17 @@ func TestAcc_DataSourceClusterOptions(t *testing.T) {
ProtoV6ProviderFactories: astronomerprovider.TestAccProtoV6ProviderFactories,
Steps: []resource.TestStep{
{
- Config: astronomerprovider.ProviderConfig(t, true) + clusterOptions("invalid", "AWS"),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + clusterOptions("invalid", "AWS"),
ExpectError: regexp.MustCompile(`type value must be one of`),
},
{
- Config: astronomerprovider.ProviderConfig(t, true) + clusterOptions("HYBRID", "AWS"),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + clusterOptions("HYBRID", "AWS"),
Check: resource.ComposeTestCheckFunc(
checkClusterOptions("AWS"),
),
},
{
- Config: astronomerprovider.ProviderConfig(t, true) + clusterOptionsWithoutProviderFilter("HYBRID"),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + clusterOptionsWithoutProviderFilter("HYBRID"),
Check: resource.ComposeTestCheckFunc(
checkClusterOptionsWithoutProviderFilter(),
),
diff --git a/internal/provider/datasources/data_source_cluster_test.go b/internal/provider/datasources/data_source_cluster_test.go
index 7044821c..c9342ec3 100644
--- a/internal/provider/datasources/data_source_cluster_test.go
+++ b/internal/provider/datasources/data_source_cluster_test.go
@@ -21,7 +21,7 @@ func TestAcc_DataSourceCluster(t *testing.T) {
Steps: []resource.TestStep{
// Check the data source for cluster for a hybrid organization
{
- Config: astronomerprovider.ProviderConfig(t, false) + cluster(resourceName, hybridClusterId),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HYBRID) + cluster(resourceName, hybridClusterId),
Check: resource.ComposeTestCheckFunc(
// These checks are for the cluster data source (singular)
resource.TestCheckResourceAttrSet(resourceVar, "id"),
diff --git a/internal/provider/datasources/data_source_clusters_test.go b/internal/provider/datasources/data_source_clusters_test.go
index bb45f9bf..47103056 100644
--- a/internal/provider/datasources/data_source_clusters_test.go
+++ b/internal/provider/datasources/data_source_clusters_test.go
@@ -23,7 +23,7 @@ func TestAcc_DataSourceClustersHybrid(t *testing.T) {
Steps: []resource.TestStep{
// Check the data source for clusters for a hybrid organization
{
- Config: astronomerprovider.ProviderConfig(t, false) + clusters(tfVarName),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HYBRID) + clusters(tfVarName),
Check: resource.ComposeTestCheckFunc(
checkClusters(tfVarName),
),
diff --git a/internal/provider/datasources/data_source_deployment_options_test.go b/internal/provider/datasources/data_source_deployment_options_test.go
index 40c79499..fae00317 100644
--- a/internal/provider/datasources/data_source_deployment_options_test.go
+++ b/internal/provider/datasources/data_source_deployment_options_test.go
@@ -19,49 +19,49 @@ func TestAcc_DataSourceDeploymentOptionsHosted(t *testing.T) {
ProtoV6ProviderFactories: astronomerprovider.TestAccProtoV6ProviderFactories,
Steps: []resource.TestStep{
{
- Config: astronomerprovider.ProviderConfig(t, true) + deploymentOptions(resourceName, ""),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + deploymentOptions(resourceName, ""),
Check: resource.ComposeTestCheckFunc(
CheckDeploymentOptions(resourceVar)...,
),
},
{
- Config: astronomerprovider.ProviderConfig(t, true) + deploymentOptions(resourceName, `deployment_type = "STANDARD"`),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + deploymentOptions(resourceName, `deployment_type = "STANDARD"`),
Check: resource.ComposeTestCheckFunc(
CheckDeploymentOptions(resourceVar)...,
),
},
{
- Config: astronomerprovider.ProviderConfig(t, true) + deploymentOptions(resourceName, `deployment_type = "DEDICATED"`),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + deploymentOptions(resourceName, `deployment_type = "DEDICATED"`),
Check: resource.ComposeTestCheckFunc(
CheckDeploymentOptions(resourceVar)...,
),
},
{
- Config: astronomerprovider.ProviderConfig(t, true) + deploymentOptions(resourceName, `executor = "CELERY"`),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + deploymentOptions(resourceName, `executor = "CELERY"`),
Check: resource.ComposeTestCheckFunc(
CheckDeploymentOptions(resourceVar)...,
),
},
{
- Config: astronomerprovider.ProviderConfig(t, true) + deploymentOptions(resourceName, `executor = "KUBERNETES"`),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + deploymentOptions(resourceName, `executor = "KUBERNETES"`),
Check: resource.ComposeTestCheckFunc(
CheckDeploymentOptions(resourceVar)...,
),
},
{
- Config: astronomerprovider.ProviderConfig(t, true) + deploymentOptions(resourceName, `cloud_provider = "AWS"`),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + deploymentOptions(resourceName, `cloud_provider = "AWS"`),
Check: resource.ComposeTestCheckFunc(
CheckDeploymentOptions(resourceVar)...,
),
},
{
- Config: astronomerprovider.ProviderConfig(t, true) + deploymentOptions(resourceName, `cloud_provider = "GCP"`),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + deploymentOptions(resourceName, `cloud_provider = "GCP"`),
Check: resource.ComposeTestCheckFunc(
CheckDeploymentOptions(resourceVar)...,
),
},
{
- Config: astronomerprovider.ProviderConfig(t, true) + deploymentOptions(resourceName, `cloud_provider = "AZURE"`),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + deploymentOptions(resourceName, `cloud_provider = "AZURE"`),
Check: resource.ComposeTestCheckFunc(
CheckDeploymentOptions(resourceVar)...,
),
@@ -80,43 +80,43 @@ func TestAcc_DataSourceDeploymentOptionsHybrid(t *testing.T) {
ProtoV6ProviderFactories: astronomerprovider.TestAccProtoV6ProviderFactories,
Steps: []resource.TestStep{
{
- Config: astronomerprovider.ProviderConfig(t, false) + deploymentOptions(resourceName, ""),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HYBRID) + deploymentOptions(resourceName, ""),
Check: resource.ComposeTestCheckFunc(
CheckDeploymentOptions(resourceVar)...,
),
},
{
- Config: astronomerprovider.ProviderConfig(t, false) + deploymentOptions(resourceName, `deployment_type = "HYBRID"`),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HYBRID) + deploymentOptions(resourceName, `deployment_type = "HYBRID"`),
Check: resource.ComposeTestCheckFunc(
CheckDeploymentOptions(resourceVar)...,
),
},
{
- Config: astronomerprovider.ProviderConfig(t, false) + deploymentOptions(resourceName, `executor = "CELERY"`),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HYBRID) + deploymentOptions(resourceName, `executor = "CELERY"`),
Check: resource.ComposeTestCheckFunc(
CheckDeploymentOptions(resourceVar)...,
),
},
{
- Config: astronomerprovider.ProviderConfig(t, false) + deploymentOptions(resourceName, `executor = "KUBERNETES"`),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HYBRID) + deploymentOptions(resourceName, `executor = "KUBERNETES"`),
Check: resource.ComposeTestCheckFunc(
CheckDeploymentOptions(resourceVar)...,
),
},
{
- Config: astronomerprovider.ProviderConfig(t, false) + deploymentOptions(resourceName, `cloud_provider = "AWS"`),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HYBRID) + deploymentOptions(resourceName, `cloud_provider = "AWS"`),
Check: resource.ComposeTestCheckFunc(
CheckDeploymentOptions(resourceVar)...,
),
},
{
- Config: astronomerprovider.ProviderConfig(t, false) + deploymentOptions(resourceName, `cloud_provider = "GCP"`),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HYBRID) + deploymentOptions(resourceName, `cloud_provider = "GCP"`),
Check: resource.ComposeTestCheckFunc(
CheckDeploymentOptions(resourceVar)...,
),
},
{
- Config: astronomerprovider.ProviderConfig(t, false) + deploymentOptions(resourceName, `cloud_provider = "AZURE"`),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HYBRID) + deploymentOptions(resourceName, `cloud_provider = "AZURE"`),
Check: resource.ComposeTestCheckFunc(
CheckDeploymentOptions(resourceVar)...,
),
diff --git a/internal/provider/datasources/data_source_deployments_test.go b/internal/provider/datasources/data_source_deployments_test.go
index 8ec5adbb..aef1f72e 100644
--- a/internal/provider/datasources/data_source_deployments_test.go
+++ b/internal/provider/datasources/data_source_deployments_test.go
@@ -23,7 +23,7 @@ func TestAcc_DataSourceDeployments(t *testing.T) {
Steps: []resource.TestStep{
//Check the data source for deployments for a hosted organization
{
- Config: astronomerprovider.ProviderConfig(t, true) + hostedDeployments(deploymentName),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + hostedDeployments(deploymentName),
Check: resource.ComposeTestCheckFunc(
// Doing all checks in one step because we do not want to unnecessarily create multiple deployments for the data sources test
@@ -93,7 +93,7 @@ func TestAcc_DataSourceDeployments(t *testing.T) {
Steps: []resource.TestStep{
//Check the data source for deployments for a hybrid organization
{
- Config: astronomerprovider.ProviderConfig(t, false) + hybridDeployments(),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HYBRID) + hybridDeployments(),
Check: resource.ComposeTestCheckFunc(
// Checks that the deployments data source is not empty and checks the first deployment in the list
// has some of the expected attributes
diff --git a/internal/provider/datasources/data_source_organization_test.go b/internal/provider/datasources/data_source_organization_test.go
index ccf31130..0cbd4b7c 100644
--- a/internal/provider/datasources/data_source_organization_test.go
+++ b/internal/provider/datasources/data_source_organization_test.go
@@ -17,7 +17,7 @@ func TestAcc_DataSourceOrganization(t *testing.T) {
ProtoV6ProviderFactories: astronomerprovider.TestAccProtoV6ProviderFactories,
Steps: []resource.TestStep{
{
- Config: astronomerprovider.ProviderConfig(t, true) + organization(),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + organization(),
Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr("data.astro_organization.t", "id", os.Getenv("HOSTED_ORGANIZATION_ID")),
resource.TestCheckResourceAttrSet("data.astro_organization.t", "name"),
diff --git a/internal/provider/datasources/data_source_team.go b/internal/provider/datasources/data_source_team.go
index 65538ed9..62eab101 100644
--- a/internal/provider/datasources/data_source_team.go
+++ b/internal/provider/datasources/data_source_team.go
@@ -69,7 +69,7 @@ func (d *teamDataSource) Configure(
}
func (d *teamDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var data models.Team
+ var data models.TeamDataSource
// Read Terraform configuration data into the model
resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
diff --git a/internal/provider/datasources/data_source_team_test.go b/internal/provider/datasources/data_source_team_test.go
index a4d89640..4365e749 100644
--- a/internal/provider/datasources/data_source_team_test.go
+++ b/internal/provider/datasources/data_source_team_test.go
@@ -21,7 +21,7 @@ func TestAcc_DataSourceTeam(t *testing.T) {
ProtoV6ProviderFactories: astronomerprovider.TestAccProtoV6ProviderFactories,
Steps: []resource.TestStep{
{
- Config: astronomerprovider.ProviderConfig(t, true) + team(teamId, teamName),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + team(teamId, teamName),
Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttrSet(resourceVar, "id"),
resource.TestCheckResourceAttrSet(resourceVar, "name"),
diff --git a/internal/provider/datasources/data_source_teams_test.go b/internal/provider/datasources/data_source_teams_test.go
index e327e232..41c18e67 100644
--- a/internal/provider/datasources/data_source_teams_test.go
+++ b/internal/provider/datasources/data_source_teams_test.go
@@ -20,7 +20,7 @@ func TestAcc_DataSourceTeams(t *testing.T) {
ProtoV6ProviderFactories: astronomerprovider.TestAccProtoV6ProviderFactories,
Steps: []resource.TestStep{
{
- Config: astronomerprovider.ProviderConfig(t, true) + teams(tfVarName),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + teams(tfVarName),
Check: resource.ComposeTestCheckFunc(
checkTeams(tfVarName),
),
diff --git a/internal/provider/datasources/data_source_user_test.go b/internal/provider/datasources/data_source_user_test.go
index 7ba99ba6..c07a52ed 100644
--- a/internal/provider/datasources/data_source_user_test.go
+++ b/internal/provider/datasources/data_source_user_test.go
@@ -21,7 +21,7 @@ func TestAcc_DataSourceUser(t *testing.T) {
ProtoV6ProviderFactories: astronomerprovider.TestAccProtoV6ProviderFactories,
Steps: []resource.TestStep{
{
- Config: astronomerprovider.ProviderConfig(t, true) + user(userId, userName),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + user(userId, userName),
Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttrSet(resourceVar, "id"),
resource.TestCheckResourceAttrSet(resourceVar, "username"),
diff --git a/internal/provider/datasources/data_source_users_test.go b/internal/provider/datasources/data_source_users_test.go
index f12c9ab3..2e32be00 100644
--- a/internal/provider/datasources/data_source_users_test.go
+++ b/internal/provider/datasources/data_source_users_test.go
@@ -23,19 +23,19 @@ func TestAcc_DataSourceUsers(t *testing.T) {
ProtoV6ProviderFactories: astronomerprovider.TestAccProtoV6ProviderFactories,
Steps: []resource.TestStep{
{
- Config: astronomerprovider.ProviderConfig(t, true) + users(tfVarName),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + users(tfVarName),
Check: resource.ComposeTestCheckFunc(
checkUsers(tfVarName, false, false),
),
},
{
- Config: astronomerprovider.ProviderConfig(t, true) + usersFilterWorkspaceId(tfVarName, tfWorkspaceId),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + usersFilterWorkspaceId(tfVarName, tfWorkspaceId),
Check: resource.ComposeTestCheckFunc(
checkUsers(tfVarName, true, false),
),
},
{
- Config: astronomerprovider.ProviderConfig(t, true) + usersFilterDeploymentId(tfVarName, tfDeploymentId),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + usersFilterDeploymentId(tfVarName, tfDeploymentId),
Check: resource.ComposeTestCheckFunc(
checkUsers(tfVarName, false, true),
),
diff --git a/internal/provider/datasources/data_source_workspaces_test.go b/internal/provider/datasources/data_source_workspaces_test.go
index 6db21a8f..bcef28cf 100644
--- a/internal/provider/datasources/data_source_workspaces_test.go
+++ b/internal/provider/datasources/data_source_workspaces_test.go
@@ -22,7 +22,7 @@ func TestAcc_DataSourceWorkspaces(t *testing.T) {
ProtoV6ProviderFactories: astronomerprovider.TestAccProtoV6ProviderFactories,
Steps: []resource.TestStep{
{
- Config: astronomerprovider.ProviderConfig(t, true) + workspaces(workspaceName, ""),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + workspaces(workspaceName, ""),
Check: resource.ComposeTestCheckFunc(
// These checks are for the workspace data source (singular)
resource.TestCheckResourceAttrSet("data.astro_workspace.test_data_workspace", "id"),
@@ -41,23 +41,23 @@ func TestAcc_DataSourceWorkspaces(t *testing.T) {
},
// The following tests are for filtering the workspaces data source
{
- Config: astronomerprovider.ProviderConfig(t, true) + workspaces(workspaceName, `workspace_ids = [astro_workspace.test_workspace1.id]`),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + workspaces(workspaceName, `workspace_ids = [astro_workspace.test_workspace1.id]`),
Check: resource.ComposeTestCheckFunc(
checkWorkspaces(workspaceName + "-1"),
),
},
{
- Config: astronomerprovider.ProviderConfig(t, true) + workspaces(workspaceName, fmt.Sprintf(`names = ["%v-1"]`, workspaceName)),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + workspaces(workspaceName, fmt.Sprintf(`names = ["%v-1"]`, workspaceName)),
Check: resource.ComposeTestCheckFunc(
checkWorkspaces(workspaceName + "-1"),
),
},
{
- Config: astronomerprovider.ProviderConfig(t, true) + workspaces(workspaceName, fmt.Sprintf(`names = ["%v"]`, cuid.New())),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + workspaces(workspaceName, fmt.Sprintf(`names = ["%v"]`, cuid.New())),
Check: checkWorkspacesAreEmpty(),
},
{
- Config: astronomerprovider.ProviderConfig(t, true) + workspaces(workspaceName, fmt.Sprintf(`workspace_ids = ["%v"]`, cuid.New())),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + workspaces(workspaceName, fmt.Sprintf(`workspace_ids = ["%v"]`, cuid.New())),
Check: checkWorkspacesAreEmpty(),
},
},
diff --git a/internal/provider/models/team.go b/internal/provider/models/team.go
index fac0f9b9..79e9f53a 100644
--- a/internal/provider/models/team.go
+++ b/internal/provider/models/team.go
@@ -10,8 +10,8 @@ import (
"github.com/hashicorp/terraform-plugin-framework/types"
)
-// Team describes the data source data model.
-type Team struct {
+// TeamDataSource describes the data source data model.
+type TeamDataSource struct {
Id types.String `tfsdk:"id"`
Name types.String `tfsdk:"name"`
Description types.String `tfsdk:"description"`
@@ -26,7 +26,23 @@ type Team struct {
UpdatedBy types.Object `tfsdk:"updated_by"`
}
-func (data *Team) ReadFromResponse(ctx context.Context, team *iam.Team) diag.Diagnostics {
+type TeamResource struct {
+ Id types.String `tfsdk:"id"`
+ Name types.String `tfsdk:"name"`
+ Description types.String `tfsdk:"description"`
+ IsIdpManaged types.Bool `tfsdk:"is_idp_managed"`
+ MemberIds types.Set `tfsdk:"member_ids"`
+ OrganizationRole types.String `tfsdk:"organization_role"`
+ DeploymentRoles types.Set `tfsdk:"deployment_roles"`
+ WorkspaceRoles types.Set `tfsdk:"workspace_roles"`
+ RolesCount types.Int64 `tfsdk:"roles_count"`
+ CreatedAt types.String `tfsdk:"created_at"`
+ UpdatedAt types.String `tfsdk:"updated_at"`
+ CreatedBy types.Object `tfsdk:"created_by"`
+ UpdatedBy types.Object `tfsdk:"updated_by"`
+}
+
+func (data *TeamDataSource) ReadFromResponse(ctx context.Context, team *iam.Team) diag.Diagnostics {
var diags diag.Diagnostics
data.Id = types.StringValue(team.Id)
data.Name = types.StringValue(team.Name)
@@ -64,3 +80,50 @@ func (data *Team) ReadFromResponse(ctx context.Context, team *iam.Team) diag.Dia
return nil
}
+
+func (data *TeamResource) ReadFromResponse(ctx context.Context, team *iam.Team, memberIds *[]string) diag.Diagnostics {
+ var diags diag.Diagnostics
+ data.Id = types.StringValue(team.Id)
+ data.Name = types.StringValue(team.Name)
+ if team.Description != nil && *team.Description != "" {
+ data.Description = types.StringValue(*team.Description)
+ } else {
+ data.Description = types.StringNull()
+ }
+ if memberIds != nil && len(*memberIds) > 0 {
+ data.MemberIds, diags = utils.StringSet(memberIds)
+ if diags.HasError() {
+ return diags
+ }
+ } else {
+ data.MemberIds = types.SetNull(types.StringType)
+ }
+ data.IsIdpManaged = types.BoolValue(team.IsIdpManaged)
+ data.OrganizationRole = types.StringValue(string(team.OrganizationRole))
+ data.DeploymentRoles, diags = utils.ObjectSet(ctx, team.DeploymentRoles, schemas.DeploymentRoleAttributeTypes(), DeploymentRoleTypesObject)
+ if diags.HasError() {
+ return diags
+ }
+ data.WorkspaceRoles, diags = utils.ObjectSet(ctx, team.WorkspaceRoles, schemas.WorkspaceRoleAttributeTypes(), WorkspaceRoleTypesObject)
+ if diags.HasError() {
+ return diags
+ }
+ if team.RolesCount != nil {
+ data.RolesCount = types.Int64Value(int64(*team.RolesCount))
+ } else {
+ data.RolesCount = types.Int64Value(0)
+ }
+
+ data.CreatedAt = types.StringValue(team.CreatedAt.String())
+ data.UpdatedAt = types.StringValue(team.UpdatedAt.String())
+ data.CreatedBy, diags = SubjectProfileTypesObject(ctx, team.CreatedBy)
+ if diags.HasError() {
+ return diags
+ }
+ data.UpdatedBy, diags = SubjectProfileTypesObject(ctx, team.UpdatedBy)
+ if diags.HasError() {
+ return diags
+ }
+
+ return nil
+}
diff --git a/internal/provider/models/teams.go b/internal/provider/models/teams.go
index b7684dce..9fbd21e0 100644
--- a/internal/provider/models/teams.go
+++ b/internal/provider/models/teams.go
@@ -19,7 +19,7 @@ type Teams struct {
func (data *Teams) ReadFromResponse(ctx context.Context, teams []iam.Team) diag.Diagnostics {
values := make([]attr.Value, len(teams))
for i, team := range teams {
- var singleTeamData Team
+ var singleTeamData TeamDataSource
diags := singleTeamData.ReadFromResponse(ctx, &team)
if diags.HasError() {
return diags
diff --git a/internal/provider/provider.go b/internal/provider/provider.go
index d628b280..c5c4a901 100644
--- a/internal/provider/provider.go
+++ b/internal/provider/provider.go
@@ -126,6 +126,7 @@ func (p *AstroProvider) Resources(ctx context.Context) []func() resource.Resourc
resources.NewTeamRolesResource,
resources.NewHybridClusterWorkspaceAuthorizationResource,
resources.NewApiTokenResource,
+ resources.NewTeamResource,
}
}
diff --git a/internal/provider/provider_test_utils.go b/internal/provider/provider_test_utils.go
index 6ab167cb..e6c93a6f 100644
--- a/internal/provider/provider_test_utils.go
+++ b/internal/provider/provider_test_utils.go
@@ -28,6 +28,8 @@ func TestAccPreCheck(t *testing.T) {
"HOSTED_ORGANIZATION_ID",
"HYBRID_ORGANIZATION_API_TOKEN",
"HYBRID_ORGANIZATION_ID",
+ "HOSTED_SCIM_ORGANIZATION_API_TOKEN",
+ "HOSTED_SCIM_ORGANIZATION_ID",
"HYBRID_DRY_RUN_CLUSTER_ID",
"ASTRO_API_HOST",
"HYBRID_CLUSTER_ID",
@@ -47,14 +49,29 @@ func TestAccPreCheck(t *testing.T) {
}
}
-func ProviderConfig(t *testing.T, isHosted bool) string {
+type TestOrganizationType string
+
+// Define values for TestOrganizationType
+const (
+ HOSTED TestOrganizationType = "HOSTED"
+ HYBRID TestOrganizationType = "HYBRID"
+ HOSTEDSCIM TestOrganizationType = "HOSTED_SCIM"
+)
+
+func ProviderConfig(t *testing.T, testOrganizationType TestOrganizationType) string {
var orgId, token string
- if isHosted {
+ switch testOrganizationType {
+ case HOSTED:
orgId = os.Getenv("HOSTED_ORGANIZATION_ID")
token = os.Getenv("HOSTED_ORGANIZATION_API_TOKEN")
- } else {
+ case HOSTEDSCIM:
+ orgId = os.Getenv("HOSTED_SCIM_ORGANIZATION_ID")
+ token = os.Getenv("HOSTED_SCIM_ORGANIZATION_API_TOKEN")
+ case HYBRID:
orgId = os.Getenv("HYBRID_ORGANIZATION_ID")
token = os.Getenv("HYBRID_ORGANIZATION_API_TOKEN")
+ default:
+ t.Fatalf("Invalid test organization type: %v", testOrganizationType)
}
return fmt.Sprintf(`
diff --git a/internal/provider/resources/resource_api_token.go b/internal/provider/resources/resource_api_token.go
index 32e0cfac..90f93585 100644
--- a/internal/provider/resources/resource_api_token.go
+++ b/internal/provider/resources/resource_api_token.go
@@ -5,6 +5,8 @@ import (
"fmt"
"net/http"
+ "github.com/astronomer/terraform-provider-astro/internal/provider/common"
+
"github.com/astronomer/terraform-provider-astro/internal/clients"
"github.com/astronomer/terraform-provider-astro/internal/clients/iam"
"github.com/astronomer/terraform-provider-astro/internal/provider/models"
@@ -430,7 +432,7 @@ func (r *ApiTokenResource) ValidateConfig(
entityType := data.Type.ValueString()
// Check if the role is valid for the token entity type
- if !utils.ValidateRoleMatchesEntityType(tokenRole.Role, entityType) {
+ if !common.ValidateRoleMatchesEntityType(tokenRole.Role, entityType) {
resp.Diagnostics.AddError(
fmt.Sprintf("Role '%s' is not valid for token type '%s'", tokenRole.Role, entityType),
fmt.Sprintf("Please provide a valid role for the entity type '%s'", entityType),
@@ -468,7 +470,7 @@ func (r *ApiTokenResource) ValidateApiTokenRoles(entityType string, roles []iam.
}
}
- if !utils.ValidateRoleMatchesEntityType(role.Role, string(role.EntityType)) {
+ if !common.ValidateRoleMatchesEntityType(role.Role, string(role.EntityType)) {
return diag.Diagnostics{
diag.NewErrorDiagnostic(
fmt.Sprintf("Role '%s' is not valid for entity type '%s'", role.Role, role.EntityType),
@@ -477,7 +479,7 @@ func (r *ApiTokenResource) ValidateApiTokenRoles(entityType string, roles []iam.
}
}
- if utils.ValidateRoleMatchesEntityType(role.Role, entityType) {
+ if common.ValidateRoleMatchesEntityType(role.Role, entityType) {
numRolesMatchingEntityType++
}
}
diff --git a/internal/provider/resources/resource_api_token_test.go b/internal/provider/resources/resource_api_token_test.go
index 2d28b70d..7b03ef1f 100644
--- a/internal/provider/resources/resource_api_token_test.go
+++ b/internal/provider/resources/resource_api_token_test.go
@@ -41,7 +41,7 @@ func TestAcc_ResourceOrganizationApiToken(t *testing.T) {
Steps: []resource.TestStep{
// Test invalid role for token type
{
- Config: astronomerprovider.ProviderConfig(t, true) + apiToken(apiTokenInput{
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + apiToken(apiTokenInput{
Name: apiTokenName,
Type: string(iam.ORGANIZATION),
Roles: []apiTokenRole{
@@ -56,7 +56,7 @@ func TestAcc_ResourceOrganizationApiToken(t *testing.T) {
},
// Test invalid role for entity type
{
- Config: astronomerprovider.ProviderConfig(t, true) + apiToken(apiTokenInput{
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + apiToken(apiTokenInput{
Name: apiTokenName,
Type: string(iam.ORGANIZATION),
Roles: []apiTokenRole{
@@ -71,7 +71,7 @@ func TestAcc_ResourceOrganizationApiToken(t *testing.T) {
},
// Test multiple roles of the same type
{
- Config: astronomerprovider.ProviderConfig(t, true) + apiToken(apiTokenInput{
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + apiToken(apiTokenInput{
Name: apiTokenName,
Type: string(iam.ORGANIZATION),
Roles: []apiTokenRole{
@@ -91,7 +91,7 @@ func TestAcc_ResourceOrganizationApiToken(t *testing.T) {
},
// Create the organization api token
{
- Config: astronomerprovider.ProviderConfig(t, true) + apiToken(apiTokenInput{
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + apiToken(apiTokenInput{
Name: apiTokenName,
Description: utils.TestResourceDescription,
Type: string(iam.ORGANIZATION),
@@ -142,7 +142,7 @@ func TestAcc_ResourceOrganizationApiToken(t *testing.T) {
},
// Change properties and check they have been updated in terraform state
{
- Config: astronomerprovider.ProviderConfig(t, true) + apiToken(apiTokenInput{
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + apiToken(apiTokenInput{
Name: apiTokenName,
Description: "new description",
Type: string(iam.ORGANIZATION),
@@ -173,7 +173,7 @@ func TestAcc_ResourceOrganizationApiToken(t *testing.T) {
},
// Change the resource type and remove roles and optional fields
{
- Config: astronomerprovider.ProviderConfig(t, true) + apiToken(apiTokenInput{
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + apiToken(apiTokenInput{
Name: apiTokenName,
Type: string(iam.WORKSPACE),
Roles: []apiTokenRole{
@@ -197,7 +197,7 @@ func TestAcc_ResourceOrganizationApiToken(t *testing.T) {
},
// Change resource type back to ORGANIZATION
{
- Config: astronomerprovider.ProviderConfig(t, true) + apiToken(apiTokenInput{
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + apiToken(apiTokenInput{
Name: apiTokenName,
Description: utils.TestResourceDescription,
Type: string(iam.ORGANIZATION),
@@ -258,7 +258,7 @@ func TestAcc_ResourceWorkspaceApiToken(t *testing.T) {
Steps: []resource.TestStep{
// Test invalid role for token type
{
- Config: astronomerprovider.ProviderConfig(t, true) + apiToken(apiTokenInput{
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + apiToken(apiTokenInput{
Name: apiTokenName,
Type: string(iam.WORKSPACE),
Roles: []apiTokenRole{
@@ -273,7 +273,7 @@ func TestAcc_ResourceWorkspaceApiToken(t *testing.T) {
},
// Test invalid role for entity type
{
- Config: astronomerprovider.ProviderConfig(t, true) + apiToken(apiTokenInput{
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + apiToken(apiTokenInput{
Name: apiTokenName,
Type: string(iam.WORKSPACE),
Roles: []apiTokenRole{
@@ -288,7 +288,7 @@ func TestAcc_ResourceWorkspaceApiToken(t *testing.T) {
},
// Test multiple roles of the same type
{
- Config: astronomerprovider.ProviderConfig(t, true) + apiToken(apiTokenInput{
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + apiToken(apiTokenInput{
Name: apiTokenName,
Type: string(iam.WORKSPACE),
Roles: []apiTokenRole{
@@ -308,7 +308,7 @@ func TestAcc_ResourceWorkspaceApiToken(t *testing.T) {
},
// Create the workspace api token
{
- Config: astronomerprovider.ProviderConfig(t, true) + apiToken(apiTokenInput{
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + apiToken(apiTokenInput{
Name: apiTokenName,
Description: utils.TestResourceDescription,
Type: string(iam.WORKSPACE),
@@ -351,7 +351,7 @@ func TestAcc_ResourceWorkspaceApiToken(t *testing.T) {
},
// Change properties and check they have been updated in terraform state
{
- Config: astronomerprovider.ProviderConfig(t, true) + apiToken(apiTokenInput{
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + apiToken(apiTokenInput{
Name: apiTokenName,
Description: "new description",
Type: string(iam.WORKSPACE),
@@ -377,7 +377,7 @@ func TestAcc_ResourceWorkspaceApiToken(t *testing.T) {
},
// Change the resource type and remove roles and optional fields
{
- Config: astronomerprovider.ProviderConfig(t, true) + apiToken(apiTokenInput{
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + apiToken(apiTokenInput{
Name: apiTokenName,
Type: string(iam.ORGANIZATION),
Roles: []apiTokenRole{
@@ -401,7 +401,7 @@ func TestAcc_ResourceWorkspaceApiToken(t *testing.T) {
},
// Change resource type back to WORKSPACE
{
- Config: astronomerprovider.ProviderConfig(t, true) + apiToken(apiTokenInput{
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + apiToken(apiTokenInput{
Name: apiTokenName,
Description: utils.TestResourceDescription,
Type: string(iam.WORKSPACE),
@@ -457,7 +457,7 @@ func TestAcc_ResourceDeploymentApiToken(t *testing.T) {
Steps: []resource.TestStep{
// Test invalid role for token type
{
- Config: astronomerprovider.ProviderConfig(t, true) + apiToken(apiTokenInput{
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + apiToken(apiTokenInput{
Name: apiTokenName,
Type: string(iam.DEPLOYMENT),
Roles: []apiTokenRole{
@@ -472,7 +472,7 @@ func TestAcc_ResourceDeploymentApiToken(t *testing.T) {
},
// Test invalid role for entity type
{
- Config: astronomerprovider.ProviderConfig(t, true) + apiToken(apiTokenInput{
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + apiToken(apiTokenInput{
Name: apiTokenName,
Type: string(iam.DEPLOYMENT),
Roles: []apiTokenRole{
@@ -487,7 +487,7 @@ func TestAcc_ResourceDeploymentApiToken(t *testing.T) {
},
// Test invalid role for API token type
{
- Config: astronomerprovider.ProviderConfig(t, true) + apiToken(apiTokenInput{
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + apiToken(apiTokenInput{
Name: apiTokenName,
Type: string(iam.DEPLOYMENT),
Roles: []apiTokenRole{
@@ -502,7 +502,7 @@ func TestAcc_ResourceDeploymentApiToken(t *testing.T) {
},
// Create the deployment api token
{
- Config: astronomerprovider.ProviderConfig(t, true) + apiToken(apiTokenInput{
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + apiToken(apiTokenInput{
Name: apiTokenName,
Description: utils.TestResourceDescription,
Type: string(iam.DEPLOYMENT),
@@ -537,7 +537,7 @@ func TestAcc_ResourceDeploymentApiToken(t *testing.T) {
},
// Change properties and check they have been updated in terraform state
{
- Config: astronomerprovider.ProviderConfig(t, true) + apiToken(apiTokenInput{
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + apiToken(apiTokenInput{
Name: apiTokenName,
Description: "new description",
Type: string(iam.DEPLOYMENT),
@@ -558,7 +558,7 @@ func TestAcc_ResourceDeploymentApiToken(t *testing.T) {
},
// Change the resource type
{
- Config: astronomerprovider.ProviderConfig(t, true) + apiToken(apiTokenInput{
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + apiToken(apiTokenInput{
Name: apiTokenName,
Description: utils.TestResourceDescription,
Type: string(iam.ORGANIZATION),
@@ -583,7 +583,7 @@ func TestAcc_ResourceDeploymentApiToken(t *testing.T) {
},
// Change resource type back to DEPLOYMENT
{
- Config: astronomerprovider.ProviderConfig(t, true) + apiToken(apiTokenInput{
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + apiToken(apiTokenInput{
Name: apiTokenName,
Description: utils.TestResourceDescription,
Type: string(iam.DEPLOYMENT),
diff --git a/internal/provider/resources/resource_cluster_test.go b/internal/provider/resources/resource_cluster_test.go
index 1fde7e2a..52fb0a7b 100644
--- a/internal/provider/resources/resource_cluster_test.go
+++ b/internal/provider/resources/resource_cluster_test.go
@@ -51,7 +51,7 @@ func TestAcc_ResourceClusterAwsWithDedicatedDeployments(t *testing.T) {
),
Steps: []resource.TestStep{
{
- Config: astronomerprovider.ProviderConfig(t, true) +
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) +
workspace(workspaceName, workspaceName, utils.TestResourceDescription, false) +
cluster(clusterInput{
Name: awsClusterName,
@@ -89,7 +89,7 @@ func TestAcc_ResourceClusterAwsWithDedicatedDeployments(t *testing.T) {
},
// Just update cluster and remove workspace restrictions
{
- Config: astronomerprovider.ProviderConfig(t, true) +
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) +
workspace(workspaceName, workspaceName, utils.TestResourceDescription, false) +
cluster(clusterInput{
Name: awsClusterName,
@@ -127,7 +127,7 @@ func TestAcc_ResourceClusterAwsWithDedicatedDeployments(t *testing.T) {
// Change properties of cluster and deployment and check they have been updated in terraform state
// Add back workspace restrictions
{
- Config: astronomerprovider.ProviderConfig(t, true) +
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) +
workspace(workspaceName, workspaceName, utils.TestResourceDescription, false) +
cluster(clusterInput{
Name: awsClusterName,
@@ -165,7 +165,7 @@ func TestAcc_ResourceClusterAwsWithDedicatedDeployments(t *testing.T) {
},
// Remove deployment
{
- Config: astronomerprovider.ProviderConfig(t, true) +
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) +
workspace(workspaceName, workspaceName, utils.TestResourceDescription, false) +
cluster(clusterInput{
Name: awsClusterName,
@@ -223,7 +223,7 @@ func TestAcc_ResourceClusterAzureWithDedicatedDeployments(t *testing.T) {
),
Steps: []resource.TestStep{
{
- Config: astronomerprovider.ProviderConfig(t, true) +
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) +
workspace(workspaceName, workspaceName, utils.TestResourceDescription, false) +
cluster(clusterInput{
Name: azureClusterName,
@@ -294,7 +294,7 @@ func TestAcc_ResourceClusterGcpWithDedicatedDeployments(t *testing.T) {
),
Steps: []resource.TestStep{
{
- Config: astronomerprovider.ProviderConfig(t, true) +
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) +
workspace(workspaceName, workspaceName, utils.TestResourceDescription, false) +
cluster(clusterInput{
Name: gcpClusterName,
@@ -360,7 +360,7 @@ func TestAcc_ResourceClusterRemovedOutsideOfTerraform(t *testing.T) {
CheckDestroy: testAccCheckClusterExistence(t, clusterName, true, false),
Steps: []resource.TestStep{
{
- Config: astronomerprovider.ProviderConfig(t, true) + clusterWithVariableName(depInput),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + clusterWithVariableName(depInput),
ConfigVariables: map[string]config.Variable{
"name": config.StringVariable(clusterName),
},
@@ -375,7 +375,7 @@ func TestAcc_ResourceClusterRemovedOutsideOfTerraform(t *testing.T) {
},
{
PreConfig: func() { deleteClusterOutsideOfTerraform(t, clusterName) },
- Config: astronomerprovider.ProviderConfig(t, true) + clusterWithVariableName(depInput),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + clusterWithVariableName(depInput),
ConfigVariables: map[string]config.Variable{
"name": config.StringVariable(clusterName),
},
diff --git a/internal/provider/resources/resource_deployment_test.go b/internal/provider/resources/resource_deployment_test.go
index 9c4404dc..87797128 100644
--- a/internal/provider/resources/resource_deployment_test.go
+++ b/internal/provider/resources/resource_deployment_test.go
@@ -37,7 +37,7 @@ func TestAcc_ResourceDeploymentHybrid(t *testing.T) {
),
Steps: []resource.TestStep{
{
- Config: astronomerprovider.ProviderConfig(t, false) + hybridDeployment(hybridDeploymentInput{
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HYBRID) + hybridDeployment(hybridDeploymentInput{
Name: deploymentName,
Description: utils.TestResourceDescription,
ClusterId: clusterId,
@@ -62,7 +62,7 @@ func TestAcc_ResourceDeploymentHybrid(t *testing.T) {
},
// Change properties and check they have been updated in terraform state including executor change
{
- Config: astronomerprovider.ProviderConfig(t, false) + hybridDeployment(hybridDeploymentInput{
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HYBRID) + hybridDeployment(hybridDeploymentInput{
Name: deploymentName,
Description: utils.TestResourceDescription,
ClusterId: clusterId,
@@ -83,7 +83,7 @@ func TestAcc_ResourceDeploymentHybrid(t *testing.T) {
},
// Change executor back to KUBERNETES and check it is correctly updated in terraform state
{
- Config: astronomerprovider.ProviderConfig(t, false) + hybridDeployment(hybridDeploymentInput{
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HYBRID) + hybridDeployment(hybridDeploymentInput{
Name: deploymentName,
Description: utils.TestResourceDescription,
ClusterId: clusterId,
@@ -132,7 +132,7 @@ func TestAcc_ResourceDeploymentStandard(t *testing.T) {
),
Steps: []resource.TestStep{
{
- Config: astronomerprovider.ProviderConfig(t, true) + standardDeployment(standardDeploymentInput{
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + standardDeployment(standardDeploymentInput{
Name: awsDeploymentName,
Description: utils.TestResourceDescription,
Region: "us-east-1",
@@ -157,7 +157,7 @@ func TestAcc_ResourceDeploymentStandard(t *testing.T) {
},
// Change properties and check they have been updated in terraform state including executor change
{
- Config: astronomerprovider.ProviderConfig(t, true) + standardDeployment(standardDeploymentInput{
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + standardDeployment(standardDeploymentInput{
Name: awsDeploymentName,
Description: utils.TestResourceDescription,
Region: "us-east-1",
@@ -178,7 +178,7 @@ func TestAcc_ResourceDeploymentStandard(t *testing.T) {
},
// Change executor back to KUBERNETES and check it is correctly updated in terraform state
{
- Config: astronomerprovider.ProviderConfig(t, true) + standardDeployment(standardDeploymentInput{
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + standardDeployment(standardDeploymentInput{
Name: awsDeploymentName,
Description: utils.TestResourceDescription,
Region: "us-east-1",
@@ -196,7 +196,7 @@ func TestAcc_ResourceDeploymentStandard(t *testing.T) {
},
// Change property that requires destroy and recreate (currently: is_development_mode)
{
- Config: astronomerprovider.ProviderConfig(t, true) + standardDeployment(standardDeploymentInput{
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + standardDeployment(standardDeploymentInput{
Name: awsDeploymentName,
Description: utils.TestResourceDescription,
Region: "us-east-1",
@@ -215,7 +215,7 @@ func TestAcc_ResourceDeploymentStandard(t *testing.T) {
},
// Change is_development_mode back to false (will not recreate)
{
- Config: astronomerprovider.ProviderConfig(t, true) + standardDeployment(standardDeploymentInput{
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + standardDeployment(standardDeploymentInput{
Name: awsDeploymentName,
Description: utils.TestResourceDescription,
Region: "us-east-1",
@@ -254,7 +254,7 @@ func TestAcc_ResourceDeploymentStandard(t *testing.T) {
),
Steps: []resource.TestStep{
{
- Config: astronomerprovider.ProviderConfig(t, true) + standardDeployment(standardDeploymentInput{
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + standardDeployment(standardDeploymentInput{
Name: azureCeleryDeploymentName,
Description: utils.TestResourceDescription,
Region: "westus2",
@@ -296,7 +296,7 @@ func TestAcc_ResourceDeploymentStandard(t *testing.T) {
),
Steps: []resource.TestStep{
{
- Config: astronomerprovider.ProviderConfig(t, true) + standardDeployment(standardDeploymentInput{
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + standardDeployment(standardDeploymentInput{
Name: gcpKubernetesDeploymentName,
Description: utils.TestResourceDescription,
Region: "us-east4",
@@ -341,20 +341,20 @@ func TestAcc_ResourceDeploymentStandardScalingSpec(t *testing.T) {
PreCheck: func() { astronomerprovider.TestAccPreCheck(t) },
Steps: []resource.TestStep{
{
- Config: astronomerprovider.ProviderConfig(t, true) + developmentDeployment(scalingSpecDeploymentName,
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + developmentDeployment(scalingSpecDeploymentName,
`scaling_spec = {}`,
),
ExpectError: regexp.MustCompile(`Inappropriate value for attribute "scaling_spec"`),
},
{
- Config: astronomerprovider.ProviderConfig(t, true) + developmentDeployment(scalingSpecDeploymentName,
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + developmentDeployment(scalingSpecDeploymentName,
`scaling_spec = {
hibernation_spec = {}
}`),
ExpectError: regexp.MustCompile(`scaling_spec \(hibernation\) must have either override or schedules`),
},
{
- Config: astronomerprovider.ProviderConfig(t, true) + developmentDeployment(scalingSpecDeploymentName,
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + developmentDeployment(scalingSpecDeploymentName,
`
scaling_spec = {
hibernation_spec = {
@@ -364,7 +364,7 @@ func TestAcc_ResourceDeploymentStandardScalingSpec(t *testing.T) {
ExpectError: regexp.MustCompile(`Inappropriate value for attribute "scaling_spec"`),
},
{
- Config: astronomerprovider.ProviderConfig(t, true) + developmentDeployment(scalingSpecDeploymentName,
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + developmentDeployment(scalingSpecDeploymentName,
`scaling_spec = {
hibernation_spec = {
override = {
@@ -375,7 +375,7 @@ func TestAcc_ResourceDeploymentStandardScalingSpec(t *testing.T) {
ExpectError: regexp.MustCompile(`Inappropriate value for attribute "scaling_spec"`),
},
{
- Config: astronomerprovider.ProviderConfig(t, true) + developmentDeployment(scalingSpecDeploymentName,
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + developmentDeployment(scalingSpecDeploymentName,
`scaling_spec = {
hibernation_spec = {
schedules = []
@@ -384,13 +384,13 @@ func TestAcc_ResourceDeploymentStandardScalingSpec(t *testing.T) {
ExpectError: regexp.MustCompile(`Attribute scaling_spec.hibernation_spec.schedules set must contain at least 1`), // schedules must have at least one element
},
{
- Config: astronomerprovider.ProviderConfig(t, true) + developmentDeployment(scalingSpecDeploymentName, ` `), // no scaling spec should be allowed,
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + developmentDeployment(scalingSpecDeploymentName, ` `), // no scaling spec should be allowed,
Check: resource.ComposeTestCheckFunc(
resource.TestCheckNoResourceAttr(scalingSpecResourceVar, "scaling_spec"),
),
},
{
- Config: astronomerprovider.ProviderConfig(t, true) + developmentDeployment(scalingSpecDeploymentName,
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + developmentDeployment(scalingSpecDeploymentName,
`scaling_spec = {
hibernation_spec = {
schedules = [{
@@ -408,7 +408,7 @@ func TestAcc_ResourceDeploymentStandardScalingSpec(t *testing.T) {
),
},
{
- Config: astronomerprovider.ProviderConfig(t, true) + developmentDeployment(scalingSpecDeploymentName,
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + developmentDeployment(scalingSpecDeploymentName,
`scaling_spec = {
hibernation_spec = {
override = {
@@ -422,7 +422,7 @@ func TestAcc_ResourceDeploymentStandardScalingSpec(t *testing.T) {
),
},
{
- Config: astronomerprovider.ProviderConfig(t, true) + developmentDeployment(scalingSpecDeploymentName,
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + developmentDeployment(scalingSpecDeploymentName,
`scaling_spec = {
hibernation_spec = {
override = {
@@ -439,14 +439,14 @@ func TestAcc_ResourceDeploymentStandardScalingSpec(t *testing.T) {
},
// Make scaling spec null to test that it is removed from the deployment with no errors
{
- Config: astronomerprovider.ProviderConfig(t, true) + developmentDeployment(scalingSpecDeploymentName,
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + developmentDeployment(scalingSpecDeploymentName,
` `),
Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr(scalingSpecResourceVar, "scaling_spec.%", "0"),
),
},
{
- Config: astronomerprovider.ProviderConfig(t, true) + developmentDeployment(scalingSpecDeploymentName,
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + developmentDeployment(scalingSpecDeploymentName,
`scaling_spec = {
hibernation_spec = {
schedules = [{
@@ -497,7 +497,7 @@ func TestAcc_ResourceDeploymentStandardRemovedOutsideOfTerraform(t *testing.T) {
CheckDestroy: testAccCheckDeploymentExistence(t, standardDeploymentName, true, false),
Steps: []resource.TestStep{
{
- Config: astronomerprovider.ProviderConfig(t, true) + standardDeploymentWithVariableName(depInput),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + standardDeploymentWithVariableName(depInput),
ConfigVariables: map[string]config.Variable{
"name": config.StringVariable(standardDeploymentName),
},
@@ -513,7 +513,7 @@ func TestAcc_ResourceDeploymentStandardRemovedOutsideOfTerraform(t *testing.T) {
},
{
PreConfig: func() { deleteDeploymentOutsideOfTerraform(t, standardDeploymentName, true) },
- Config: astronomerprovider.ProviderConfig(t, true) + standardDeploymentWithVariableName(depInput),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + standardDeploymentWithVariableName(depInput),
ConfigVariables: map[string]config.Variable{
"name": config.StringVariable(standardDeploymentName),
},
diff --git a/internal/provider/resources/resource_hybrid_cluster_workspace_authorization_test.go b/internal/provider/resources/resource_hybrid_cluster_workspace_authorization_test.go
index e689be12..138ebc42 100644
--- a/internal/provider/resources/resource_hybrid_cluster_workspace_authorization_test.go
+++ b/internal/provider/resources/resource_hybrid_cluster_workspace_authorization_test.go
@@ -40,7 +40,7 @@ func TestAcc_ResourceHybridClusterWorkspaceAuthorization(t *testing.T) {
Steps: []resource.TestStep{
// Test with workspace created through terraform
{
- Config: astronomerprovider.ProviderConfig(t, false) +
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HYBRID) +
workspace(workspaceName, workspaceName, utils.TestResourceDescription, false) +
hybridClusterWorkspaceAuthorization(hybridClusterWorkspaceAuthorizationInput{
Name: clusterWorkspaceAuth,
@@ -65,7 +65,7 @@ func TestAcc_ResourceHybridClusterWorkspaceAuthorization(t *testing.T) {
},
// Test with no workspaceIds
{
- Config: astronomerprovider.ProviderConfig(t, false) +
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HYBRID) +
hybridClusterWorkspaceAuthorization(hybridClusterWorkspaceAuthorizationInput{
Name: clusterWorkspaceAuth,
ClusterId: clusterId,
diff --git a/internal/provider/resources/resource_team.go b/internal/provider/resources/resource_team.go
new file mode 100644
index 00000000..965132c0
--- /dev/null
+++ b/internal/provider/resources/resource_team.go
@@ -0,0 +1,619 @@
+package resources
+
+import (
+ "context"
+ "fmt"
+ "net/http"
+
+ "github.com/astronomer/terraform-provider-astro/internal/clients/platform"
+
+ "github.com/astronomer/terraform-provider-astro/internal/provider/common"
+
+ "github.com/astronomer/terraform-provider-astro/internal/clients"
+ "github.com/astronomer/terraform-provider-astro/internal/clients/iam"
+ "github.com/astronomer/terraform-provider-astro/internal/provider/models"
+ "github.com/astronomer/terraform-provider-astro/internal/provider/schemas"
+ "github.com/astronomer/terraform-provider-astro/internal/utils"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/hashicorp/terraform-plugin-framework/path"
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+ "github.com/hashicorp/terraform-plugin-log/tflog"
+ "github.com/samber/lo"
+)
+
+var _ resource.Resource = &TeamResource{}
+var _ resource.ResourceWithImportState = &TeamResource{}
+var _ resource.ResourceWithConfigure = &TeamResource{}
+var _ resource.ResourceWithValidateConfig = &TeamResource{}
+
+func NewTeamResource() resource.Resource {
+ return &TeamResource{}
+}
+
+// TeamResource defines the resource implementation.
+type TeamResource struct {
+ IamClient *iam.ClientWithResponses
+ PlatformClient *platform.ClientWithResponses
+ OrganizationId string
+}
+
+func (r *TeamResource) Metadata(
+ ctx context.Context,
+ req resource.MetadataRequest,
+ resp *resource.MetadataResponse,
+) {
+ resp.TypeName = req.ProviderTypeName + "_team"
+}
+
+func (r *TeamResource) Schema(
+ ctx context.Context,
+ req resource.SchemaRequest,
+ resp *resource.SchemaResponse,
+) {
+ resp.Schema = schema.Schema{
+ // This description is used by the documentation generator and the language server.
+ MarkdownDescription: "Team resource",
+ Attributes: schemas.TeamResourceSchemaAttributes(),
+ }
+}
+
+func (r *TeamResource) Configure(
+ ctx context.Context,
+ req resource.ConfigureRequest,
+ resp *resource.ConfigureResponse,
+) {
+ // Prevent panic if the provider has not been configured.
+ if req.ProviderData == nil {
+ return
+ }
+
+ apiClients, ok := req.ProviderData.(models.ApiClientsModel)
+ if !ok {
+ utils.ResourceApiClientConfigureError(ctx, req, resp)
+ return
+ }
+
+ r.IamClient = apiClients.IamClient
+ r.PlatformClient = apiClients.PlatformClient
+ r.OrganizationId = apiClients.OrganizationId
+}
+
+func (r *TeamResource) MutateRoles(
+ ctx context.Context,
+ data *models.TeamResource,
+ teamId string,
+) diag.Diagnostics {
+ // Convert the models to the request types for the API
+ workspaceRoles, diags := common.RequestWorkspaceRoles(ctx, data.WorkspaceRoles)
+ if diags.HasError() {
+ return diags
+ }
+ deploymentRoles, diags := common.RequestDeploymentRoles(ctx, data.DeploymentRoles)
+ if diags.HasError() {
+ return diags
+ }
+
+ // Validate the roles
+ diags = common.ValidateWorkspaceDeploymentRoles(ctx, common.ValidateWorkspaceDeploymentRolesInput{
+ PlatformClient: r.PlatformClient,
+ OrganizationId: r.OrganizationId,
+ WorkspaceRoles: workspaceRoles,
+ DeploymentRoles: deploymentRoles,
+ })
+ if diags.HasError() {
+ return diags
+ }
+
+ // Update team roles
+ updateTeamRolesRequest := iam.UpdateTeamRolesJSONRequestBody{
+ DeploymentRoles: &deploymentRoles,
+ OrganizationRole: iam.UpdateTeamRolesRequestOrganizationRole(data.OrganizationRole.ValueString()),
+ WorkspaceRoles: &workspaceRoles,
+ }
+ teamRoles, err := r.IamClient.UpdateTeamRolesWithResponse(
+ ctx,
+ r.OrganizationId,
+ teamId,
+ updateTeamRolesRequest,
+ )
+ if err != nil {
+ tflog.Error(ctx, "failed to mutate Team roles", map[string]interface{}{"error": err})
+ diags.AddError(
+ "Client Error",
+ fmt.Sprintf("Unable to mutate Team roles, got error: %s", err),
+ )
+ return diags
+ }
+ _, diagnostic := clients.NormalizeAPIError(ctx, teamRoles.HTTPResponse, teamRoles.Body)
+ if diagnostic != nil {
+ diags.Append(diagnostic)
+ return diags
+ }
+
+ return nil
+}
+
+func (r *TeamResource) Create(
+ ctx context.Context,
+ req resource.CreateRequest,
+ resp *resource.CreateResponse,
+) {
+ var data models.TeamResource
+
+ // Read Terraform configuration data into the model
+ resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ var diags diag.Diagnostics
+
+ // Check if the organization is SCIM enabled, if it is return an error
+ diags = r.CheckOrganizationIsScim(ctx)
+ if diags.HasError() {
+ resp.Diagnostics.Append(diags...)
+ return
+ }
+
+ memberIds, diags := utils.TypesSetToStringSlice(ctx, data.MemberIds)
+ if diags.HasError() {
+ resp.Diagnostics.Append(diags...)
+ return
+ }
+
+ // Create the team request
+ createTeamRequest := iam.CreateTeamRequest{
+ Name: data.Name.ValueString(),
+ Description: data.Description.ValueStringPointer(),
+ MemberIds: &memberIds,
+ OrganizationRole: lo.ToPtr(iam.CreateTeamRequestOrganizationRole(data.OrganizationRole.ValueString())),
+ }
+
+ // Create the team
+ team, err := r.IamClient.CreateTeamWithResponse(
+ ctx,
+ r.OrganizationId,
+ createTeamRequest,
+ )
+ if err != nil {
+ tflog.Error(ctx, "failed to create Team", map[string]interface{}{"error": err})
+ resp.Diagnostics.AddError(
+ "Client Error",
+ fmt.Sprintf("Unable to create Team, got error: %s", err),
+ )
+ return
+ }
+ _, diagnostic := clients.NormalizeAPIError(ctx, team.HTTPResponse, team.Body)
+ if diagnostic != nil {
+ resp.Diagnostics.Append(diagnostic)
+ return
+ }
+
+ teamId := team.JSON200.Id
+
+ // Update team roles
+ if !data.WorkspaceRoles.IsNull() || !data.DeploymentRoles.IsNull() {
+ diags = r.MutateRoles(ctx, &data, teamId)
+ if diags.HasError() {
+ resp.Diagnostics.Append(diags...)
+ return
+ }
+ }
+
+ // Get Team and use this as data since it will have the correct roles
+	teamResp, err := r.IamClient.GetTeamWithResponse(ctx, r.OrganizationId, teamId)
+	if err != nil {
+		tflog.Error(ctx, "failed to get Team after create", map[string]interface{}{"error": err})
+		resp.Diagnostics.AddError(
+			"Client Error",
+			fmt.Sprintf("Unable to create and get Team, got error: %s", err),
+		)
+		return
+	}
+	// Surface API-level (non-2xx) errors from the refresh read so JSON200 is safe to use below
+	if _, diagnostic := clients.NormalizeAPIError(ctx, teamResp.HTTPResponse, teamResp.Body); diagnostic != nil {
+		resp.Diagnostics.Append(diagnostic)
+		return
+	}
+ diags = data.ReadFromResponse(ctx, teamResp.JSON200, &memberIds)
+ if diags.HasError() {
+ resp.Diagnostics.Append(diags...)
+ return
+ }
+
+ tflog.Trace(ctx, fmt.Sprintf("created a Team resource: %v", data.Id.ValueString()))
+
+ // Save data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
+
+func (r *TeamResource) Read(
+ ctx context.Context,
+ req resource.ReadRequest,
+ resp *resource.ReadResponse,
+) {
+ var data models.TeamResource
+
+ // Read Terraform prior state data into the model
+ resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ // get request
+ team, err := r.IamClient.GetTeamWithResponse(
+ ctx,
+ r.OrganizationId,
+ data.Id.ValueString(),
+ )
+
+ if err != nil {
+ tflog.Error(ctx, "failed to get Team", map[string]interface{}{"error": err})
+ resp.Diagnostics.AddError(
+ "Client Error",
+ fmt.Sprintf("Unable to get Team, got error: %s", err),
+ )
+ return
+ }
+ statusCode, diagnostic := clients.NormalizeAPIError(ctx, team.HTTPResponse, team.Body)
+ // If the resource no longer exists, it is recommended to ignore the errors
+ // and call RemoveResource to remove the resource from the state. The next Terraform plan will recreate the resource.
+ if statusCode == http.StatusNotFound {
+ resp.State.RemoveResource(ctx)
+ return
+ }
+ if diagnostic != nil {
+ resp.Diagnostics.Append(diagnostic)
+ return
+ }
+
+ memberIds, diags := utils.TypesSetToStringSlice(ctx, data.MemberIds)
+ if diags.HasError() {
+ resp.Diagnostics.Append(diags...)
+ return
+ }
+
+ diags = data.ReadFromResponse(ctx, team.JSON200, &memberIds)
+ if diags.HasError() {
+ resp.Diagnostics.Append(diags...)
+ return
+ }
+
+ tflog.Trace(ctx, fmt.Sprintf("read a Team resource: %v", data.Id.ValueString()))
+
+ // Save data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
+
+func (r *TeamResource) Update(
+ ctx context.Context,
+ req resource.UpdateRequest,
+ resp *resource.UpdateResponse,
+) {
+ var data models.TeamResource
+
+ // Read Terraform configuration data into the model
+ resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ var diags diag.Diagnostics
+
+ // Check if the organization is SCIM enabled, if it is return an error
+ diags = r.CheckOrganizationIsScim(ctx)
+ if diags.HasError() {
+ resp.Diagnostics.Append(diags...)
+ return
+ }
+
+ // Update team members
+ newMemberIds, diags := r.UpdateTeamMembers(ctx, data)
+ if diags.HasError() {
+ resp.Diagnostics.Append(diags...)
+ return
+ }
+
+ // Update team
+ updateTeamRequest := iam.UpdateTeamRequest{
+ Name: data.Name.ValueString(),
+ }
+
+ if !data.Description.IsNull() {
+ updateTeamRequest.Description = data.Description.ValueStringPointer()
+ } else {
+ updateTeamRequest.Description = lo.ToPtr("")
+ }
+
+ team, err := r.IamClient.UpdateTeamWithResponse(
+ ctx,
+ r.OrganizationId,
+ data.Id.ValueString(),
+ updateTeamRequest,
+ )
+ if err != nil {
+ tflog.Error(ctx, "failed to update Team", map[string]interface{}{"error": err})
+ resp.Diagnostics.AddError(
+ "Client Error",
+ fmt.Sprintf("Unable to update Team, got error: %s", err),
+ )
+ return
+ }
+ _, diagnostic := clients.NormalizeAPIError(ctx, team.HTTPResponse, team.Body)
+ if diagnostic != nil {
+ resp.Diagnostics.Append(diagnostic)
+ return
+ }
+
+ // Update team roles
+ if !data.WorkspaceRoles.IsNull() || !data.DeploymentRoles.IsNull() {
+ diags = r.MutateRoles(ctx, &data, data.Id.ValueString())
+ if diags.HasError() {
+ resp.Diagnostics.Append(diags...)
+ return
+ }
+ }
+
+ // Get Team and use this as data since it will have the correct roles
+	teamResp, err := r.IamClient.GetTeamWithResponse(ctx, r.OrganizationId, data.Id.ValueString())
+	if err != nil {
+		tflog.Error(ctx, "failed to get Team after update", map[string]interface{}{"error": err})
+		resp.Diagnostics.AddError(
+			"Client Error",
+			fmt.Sprintf("Unable to update and get Team, got error: %s", err),
+		)
+		return
+	}
+	// Surface API-level (non-2xx) errors from the refresh read so JSON200 is safe to use below
+	if _, diagnostic := clients.NormalizeAPIError(ctx, teamResp.HTTPResponse, teamResp.Body); diagnostic != nil {
+		resp.Diagnostics.Append(diagnostic)
+		return
+	}
+ diags = data.ReadFromResponse(ctx, teamResp.JSON200, &newMemberIds)
+ if diags.HasError() {
+ resp.Diagnostics.Append(diags...)
+ return
+ }
+
+ tflog.Trace(ctx, fmt.Sprintf("updated a Team resource: %v", data.Id.ValueString()))
+
+ // Save updated data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
+
+func (r *TeamResource) Delete(
+ ctx context.Context,
+ req resource.DeleteRequest,
+ resp *resource.DeleteResponse,
+) {
+ var data models.TeamResource
+
+ // Read Terraform prior state data into the model
+ resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ // delete request
+ team, err := r.IamClient.DeleteTeamWithResponse(
+ ctx,
+ r.OrganizationId,
+ data.Id.ValueString(),
+ )
+ if err != nil {
+ tflog.Error(ctx, "failed to delete Team", map[string]interface{}{"error": err})
+ resp.Diagnostics.AddError(
+ "Client Error",
+ fmt.Sprintf("Unable to delete Team, got error: %s", err),
+ )
+ return
+ }
+ statusCode, diagnostic := clients.NormalizeAPIError(ctx, team.HTTPResponse, team.Body)
+ // It is recommended to ignore 404 Resource Not Found errors when deleting a resource
+ if statusCode != http.StatusNotFound && diagnostic != nil {
+ resp.Diagnostics.Append(diagnostic)
+ return
+ }
+
+ tflog.Trace(ctx, fmt.Sprintf("deleted a Team resource: %v", data.Id.ValueString()))
+}
+
+func (r *TeamResource) ImportState(
+ ctx context.Context,
+ req resource.ImportStateRequest,
+ resp *resource.ImportStateResponse,
+) {
+ resource.ImportStatePassthroughID(ctx, path.Root("id"), req, resp)
+}
+
+func (r *TeamResource) ValidateConfig(
+ ctx context.Context,
+ req resource.ValidateConfigRequest,
+ resp *resource.ValidateConfigResponse,
+) {
+ var data models.TeamResource
+
+ resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ // Validate workspace roles
+ workspaceRoles, diags := common.RequestWorkspaceRoles(ctx, data.WorkspaceRoles)
+ if diags.HasError() {
+ resp.Diagnostics.Append(diags...)
+ return
+ }
+
+ for _, role := range workspaceRoles {
+ if !common.ValidateRoleMatchesEntityType(string(role.Role), string(iam.WORKSPACE)) {
+ resp.Diagnostics.AddError(
+ fmt.Sprintf("Role '%s' is not valid for role type '%s'", string(role.Role), string(iam.WORKSPACE)),
+ fmt.Sprintf("Please provide a valid role for the type '%s'", string(iam.WORKSPACE)),
+ )
+ return
+ }
+ }
+
+ duplicateWorkspaceIds := common.GetDuplicateWorkspaceIds(workspaceRoles)
+ if len(duplicateWorkspaceIds) > 0 {
+ resp.Diagnostics.AddError(
+ "Invalid Configuration: Cannot have multiple roles with the same workspace id",
+ fmt.Sprintf("Please provide a unique workspace id for each role. The following workspace ids are duplicated: %v", duplicateWorkspaceIds),
+ )
+ return
+ }
+
+ // Validate deployment roles
+ deploymentRoles, diags := common.RequestDeploymentRoles(ctx, data.DeploymentRoles)
+ if diags.HasError() {
+ resp.Diagnostics.Append(diags...)
+ return
+ }
+
+ for _, role := range deploymentRoles {
+ if !common.ValidateRoleMatchesEntityType(role.Role, string(iam.DEPLOYMENT)) {
+ resp.Diagnostics.AddError(
+ fmt.Sprintf("Role '%s' is not valid for role type '%s'", role.Role, string(iam.DEPLOYMENT)),
+ fmt.Sprintf("Please provide a valid role for the type '%s'", string(iam.DEPLOYMENT)),
+ )
+ return
+ }
+ }
+
+ duplicateDeploymentIds := common.GetDuplicateDeploymentIds(deploymentRoles)
+ if len(duplicateDeploymentIds) > 0 {
+ resp.Diagnostics.AddError(
+ "Invalid Configuration: Cannot have multiple roles with the same deployment id",
+			fmt.Sprintf("Please provide a unique deployment id for each role. The following deployment ids are duplicated: %v", duplicateDeploymentIds),
+ )
+ return
+ }
+}
+
+func (r *TeamResource) CheckOrganizationIsScim(ctx context.Context) diag.Diagnostics {
+ // Validate if org isScimEnabled and return error if it is
+ org, err := r.PlatformClient.GetOrganizationWithResponse(ctx, r.OrganizationId, nil)
+ if err != nil {
+ tflog.Error(ctx, "failed to validate Team", map[string]interface{}{"error": err})
+ return diag.Diagnostics{
+ diag.NewErrorDiagnostic(
+ "Client Error",
+ fmt.Sprintf("Unable to validate Team, got error: %s", err),
+ ),
+ }
+ }
+ _, diagnostic := clients.NormalizeAPIError(ctx, org.HTTPResponse, org.Body)
+ if diagnostic != nil {
+ return diag.Diagnostics{diagnostic}
+ }
+ if org.JSON200 == nil {
+ tflog.Error(ctx, "failed to get organization", map[string]interface{}{"error": "nil response"})
+ return diag.Diagnostics{
+ diag.NewErrorDiagnostic(
+ "Client Error",
+ fmt.Sprintf("Unable to read organization %v, got nil response", r.OrganizationId)),
+ }
+ }
+ if org.JSON200.IsScimEnabled {
+ return diag.Diagnostics{
+ diag.NewErrorDiagnostic(
+ "Invalid Configuration: Cannot create, update or delete a Team resource when SCIM is enabled",
+ "Please disable SCIM in the organization settings to manage Team resources",
+ ),
+ }
+ }
+ return nil
+}
+
+func (r *TeamResource) UpdateTeamMembers(ctx context.Context, data models.TeamResource) ([]string, diag.Diagnostics) {
+ // get existing team members
+ teamMembersResp, err := r.IamClient.ListTeamMembersWithResponse(
+ ctx,
+ r.OrganizationId,
+ data.Id.ValueString(),
+ nil,
+ )
+ if err != nil {
+		tflog.Error(ctx, "failed to list Team members", map[string]interface{}{"error": err})
+ return nil, diag.Diagnostics{
+ diag.NewErrorDiagnostic(
+ "Client Error",
+ fmt.Sprintf("Unable to list existing Team members, got error: %s", err),
+ ),
+ }
+ }
+ _, diagnostic := clients.NormalizeAPIError(ctx, teamMembersResp.HTTPResponse, teamMembersResp.Body)
+ if diagnostic != nil {
+ return nil, diag.Diagnostics{diagnostic}
+ }
+
+ teamMembers := teamMembersResp.JSON200.TeamMembers
+ memberIds := lo.Map(teamMembers, func(tm iam.TeamMember, _ int) string {
+ return tm.UserId
+ })
+
+ // get list of new member ids
+ newMemberIds, diags := utils.TypesSetToStringSlice(ctx, data.MemberIds)
+ if diags.HasError() {
+ return nil, diags
+ }
+
+ // find the difference between the two lists and update the team members
+ deleteIds, addIds := lo.Difference(memberIds, newMemberIds)
+
+ // delete the members that are not in the new list
+ if len(deleteIds) > 0 {
+ for _, id := range deleteIds {
+ removeTeamMemberResp, err := r.IamClient.RemoveTeamMemberWithResponse(
+ ctx,
+ r.OrganizationId,
+ data.Id.ValueString(),
+ id,
+ )
+ if err != nil {
+				tflog.Error(ctx, "failed to remove Team member", map[string]interface{}{"error": err})
+ return nil, diag.Diagnostics{
+ diag.NewErrorDiagnostic(
+ "Client Error",
+ fmt.Sprintf("Unable to remove Team member, got error: %s", err),
+ ),
+ }
+ }
+ _, diagnostic = clients.NormalizeAPIError(ctx, removeTeamMemberResp.HTTPResponse, removeTeamMemberResp.Body)
+ if diagnostic != nil {
+ return nil, diag.Diagnostics{diagnostic}
+ }
+ }
+ }
+
+ // add the members that are in the new list
+ if len(addIds) > 0 {
+ addTeamMembersRequest := iam.AddTeamMembersRequest{
+ MemberIds: addIds,
+ }
+ addTeamMembersResp, err := r.IamClient.AddTeamMembersWithResponse(
+ ctx,
+ r.OrganizationId,
+ data.Id.ValueString(),
+ addTeamMembersRequest,
+ )
+ if err != nil {
+			tflog.Error(ctx, "failed to add Team members", map[string]interface{}{"error": err})
+ return nil, diag.Diagnostics{
+ diag.NewErrorDiagnostic(
+ "Client Error",
+ fmt.Sprintf("Unable to add Team members, got error: %s", err),
+ ),
+ }
+ }
+ _, diagnostic = clients.NormalizeAPIError(ctx, addTeamMembersResp.HTTPResponse, addTeamMembersResp.Body)
+ if diagnostic != nil {
+ return nil, diag.Diagnostics{diagnostic}
+ }
+ }
+ return newMemberIds, nil
+}
diff --git a/internal/provider/resources/resource_team_roles.go b/internal/provider/resources/resource_team_roles.go
index 026ee481..13ab3328 100644
--- a/internal/provider/resources/resource_team_roles.go
+++ b/internal/provider/resources/resource_team_roles.go
@@ -5,8 +5,11 @@ import (
"fmt"
"net/http"
+ "github.com/astronomer/terraform-provider-astro/internal/clients/platform"
+
+ "github.com/astronomer/terraform-provider-astro/internal/provider/common"
+
"github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/types"
"github.com/samber/lo"
"github.com/astronomer/terraform-provider-astro/internal/clients"
@@ -32,6 +35,7 @@ func NewTeamRolesResource() resource.Resource {
// teamRolesResource defines the resource implementation.
type teamRolesResource struct {
iamClient *iam.ClientWithResponses
+ platformClient *platform.ClientWithResponses
organizationId string
}
@@ -72,6 +76,7 @@ func (r *teamRolesResource) Configure(
}
r.iamClient = apiClients.IamClient
+ r.platformClient = apiClients.PlatformClient
r.organizationId = apiClients.OrganizationId
}
@@ -82,11 +87,22 @@ func (r *teamRolesResource) MutateRoles(
teamId := data.TeamId.ValueString()
// Then convert the models to the request types for the API
- workspaceRoles, diags := RequestWorkspaceRoles(ctx, data.WorkspaceRoles)
+ workspaceRoles, diags := common.RequestWorkspaceRoles(ctx, data.WorkspaceRoles)
if diags.HasError() {
return diags
}
- deploymentRoles, diags := RequestDeploymentRoles(ctx, data.DeploymentRoles)
+ deploymentRoles, diags := common.RequestDeploymentRoles(ctx, data.DeploymentRoles)
+ if diags.HasError() {
+ return diags
+ }
+
+ // Validate the roles
+ diags = common.ValidateWorkspaceDeploymentRoles(ctx, common.ValidateWorkspaceDeploymentRolesInput{
+ PlatformClient: r.platformClient,
+ OrganizationId: r.organizationId,
+ WorkspaceRoles: workspaceRoles,
+ DeploymentRoles: deploymentRoles,
+ })
if diags.HasError() {
return diags
}
@@ -288,43 +304,3 @@ func (r *teamRolesResource) ImportState(
) {
resource.ImportStatePassthroughID(ctx, path.Root("team_id"), req, resp)
}
-
-// RequestWorkspaceRoles converts a Terraform set to a list of iam.WorkspaceRole to be used in create and update requests
-func RequestWorkspaceRoles(ctx context.Context, workspaceRolesObjSet types.Set) ([]iam.WorkspaceRole, diag.Diagnostics) {
- if len(workspaceRolesObjSet.Elements()) == 0 {
- return []iam.WorkspaceRole{}, nil
- }
-
- var roles []models.WorkspaceRole
- diags := workspaceRolesObjSet.ElementsAs(ctx, &roles, false)
- if diags.HasError() {
- return nil, diags
- }
- workspaceRoles := lo.Map(roles, func(role models.WorkspaceRole, _ int) iam.WorkspaceRole {
- return iam.WorkspaceRole{
- Role: iam.WorkspaceRoleRole(role.Role.ValueString()),
- WorkspaceId: role.WorkspaceId.ValueString(),
- }
- })
- return workspaceRoles, nil
-}
-
-// RequestDeploymentRoles converts a Terraform set to a list of iam.DeploymentRole to be used in create and update requests
-func RequestDeploymentRoles(ctx context.Context, deploymentRolesObjSet types.Set) ([]iam.DeploymentRole, diag.Diagnostics) {
- if len(deploymentRolesObjSet.Elements()) == 0 {
- return []iam.DeploymentRole{}, nil
- }
-
- var roles []models.DeploymentRole
- diags := deploymentRolesObjSet.ElementsAs(ctx, &roles, false)
- if diags.HasError() {
- return nil, diags
- }
- deploymentRoles := lo.Map(roles, func(role models.DeploymentRole, _ int) iam.DeploymentRole {
- return iam.DeploymentRole{
- Role: role.Role.ValueString(),
- DeploymentId: role.DeploymentId.ValueString(),
- }
- })
- return deploymentRoles, nil
-}
diff --git a/internal/provider/resources/resource_team_roles_test.go b/internal/provider/resources/resource_team_roles_test.go
index 4af2fffe..52c27cac 100644
--- a/internal/provider/resources/resource_team_roles_test.go
+++ b/internal/provider/resources/resource_team_roles_test.go
@@ -30,22 +30,22 @@ func TestAcc_ResourceTeamRoles(t *testing.T) {
PreCheck: func() { astronomerprovider.TestAccPreCheck(t) },
Steps: []resource.TestStep{
{
- Config: astronomerprovider.ProviderConfig(t, true) +
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) +
teamRoles(string(iam.ORGANIZATIONBILLINGADMIN), "[]", ""),
ExpectError: regexp.MustCompile("Attribute workspace_roles set must contain at least 1 elements"),
},
{
- Config: astronomerprovider.ProviderConfig(t, true) +
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) +
teamRoles(string(iam.ORGANIZATIONBILLINGADMIN), "", "[]"),
ExpectError: regexp.MustCompile("Attribute deployment_roles set must contain at least 1 elements"),
},
{
- Config: astronomerprovider.ProviderConfig(t, true) +
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) +
teamRoles("", "", ""),
ExpectError: regexp.MustCompile("Attribute organization_role value must be one of"),
},
{
- Config: astronomerprovider.ProviderConfig(t, true) +
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) +
teamRoles(string(iam.ORGANIZATIONBILLINGADMIN), "", ""),
Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr(tfVarName, "team_id", teamId),
@@ -57,7 +57,7 @@ func TestAcc_ResourceTeamRoles(t *testing.T) {
),
},
{
- Config: astronomerprovider.ProviderConfig(t, true) +
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) +
standardDeployment(standardDeploymentInput{
Name: deploymentName,
Description: utils.TestResourceDescription,
diff --git a/internal/provider/resources/resource_team_test.go b/internal/provider/resources/resource_team_test.go
new file mode 100644
index 00000000..a0c0a8f9
--- /dev/null
+++ b/internal/provider/resources/resource_team_test.go
@@ -0,0 +1,282 @@
+package resources_test
+
+import (
+ "context"
+ "fmt"
+ "os"
+ "regexp"
+ "strings"
+ "testing"
+
+ "github.com/astronomer/terraform-provider-astro/internal/clients"
+ "github.com/astronomer/terraform-provider-astro/internal/clients/iam"
+ astronomerprovider "github.com/astronomer/terraform-provider-astro/internal/provider"
+ "github.com/astronomer/terraform-provider-astro/internal/utils"
+ "github.com/hashicorp/terraform-plugin-testing/helper/resource"
+ "github.com/hashicorp/terraform-plugin-testing/terraform"
+ "github.com/samber/lo"
+ "github.com/stretchr/testify/assert"
+)
+
+func TestAcc_ResourceTeam(t *testing.T) {
+ namePrefix := utils.GenerateTestResourceName(10)
+
+ workspaceId := os.Getenv("HOSTED_WORKSPACE_ID")
+ deploymentId := os.Getenv("HOSTED_DEPLOYMENT_ID")
+ userId := os.Getenv("HOSTED_USER_ID")
+
+ failTeamName := fmt.Sprintf("%v_fail_team", namePrefix)
+ teamName := fmt.Sprintf("%v_team", namePrefix)
+ resourceVar := fmt.Sprintf("astro_team.%v", teamName)
+
+ resource.Test(t, resource.TestCase{
+ ProtoV6ProviderFactories: astronomerprovider.TestAccProtoV6ProviderFactories,
+ PreCheck: func() { astronomerprovider.TestAccPreCheck(t) },
+ CheckDestroy: resource.ComposeTestCheckFunc(
+ testAccCheckTeamExistence(t, teamName, false),
+ ),
+ Steps: []resource.TestStep{
+ // Test failure: team resource create/update/delete is rejected when the org has SCIM enabled
+ {
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTEDSCIM) + team(teamInput{
+ Name: failTeamName,
+ Description: utils.TestResourceDescription,
+ MemberIds: []string{userId},
+ OrganizationRole: string(iam.ORGANIZATIONOWNER),
+ DeploymentRoles: []role{
+ {
+ Role: "DEPLOYMENT_ADMIN",
+ Id: deploymentId,
+ },
+ },
+ WorkspaceRoles: []role{
+ {
+ Role: string(iam.WORKSPACEOWNER),
+ Id: workspaceId,
+ },
+ },
+ }),
+ ExpectError: regexp.MustCompile("Invalid Configuration: Cannot create, update or delete a Team resource when SCIM is enabled"),
+ },
+ // Test failure: check for mismatch in role and entity type
+ {
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + team(teamInput{
+ Name: failTeamName,
+ Description: utils.TestResourceDescription,
+ MemberIds: []string{userId},
+ OrganizationRole: string(iam.ORGANIZATIONOWNER),
+ WorkspaceRoles: []role{
+ {
+ Role: string(iam.ORGANIZATIONOWNER),
+ Id: workspaceId,
+ },
+ },
+ }),
+ ExpectError: regexp.MustCompile(fmt.Sprintf("Role '%s' is not valid for role type '%s'", string(iam.ORGANIZATIONOWNER), string(iam.WORKSPACE))),
+ },
+ // Test failure: check for missing corresponding workspace role if deployment role is present
+ {
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + team(teamInput{
+ Name: failTeamName,
+ Description: utils.TestResourceDescription,
+ MemberIds: []string{userId},
+ OrganizationRole: string(iam.ORGANIZATIONOWNER),
+ DeploymentRoles: []role{
+ {
+ Role: "DEPLOYMENT_ADMIN",
+ Id: deploymentId,
+ },
+ },
+ }),
+ ExpectError: regexp.MustCompile("Unable to mutate Team roles, not every deployment role has a corresponding workspace role"),
+ },
+ // Test failure: check for multiple roles with same entity id
+ {
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + team(teamInput{
+ Name: failTeamName,
+ Description: utils.TestResourceDescription,
+ MemberIds: []string{userId},
+ OrganizationRole: string(iam.ORGANIZATIONOWNER),
+ WorkspaceRoles: []role{
+ {
+ Role: string(iam.WORKSPACEOWNER),
+ Id: workspaceId,
+ },
+ {
+ Role: string(iam.WORKSPACEACCESSOR),
+ Id: workspaceId,
+ },
+ },
+ }),
+ ExpectError: regexp.MustCompile("Invalid Configuration: Cannot have multiple roles with the same workspace id"),
+ },
+ // Create team with all fields
+ {
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + team(teamInput{
+ Name: teamName,
+ Description: utils.TestResourceDescription,
+ MemberIds: []string{userId},
+ OrganizationRole: string(iam.ORGANIZATIONOWNER),
+ DeploymentRoles: []role{
+ {
+ Role: "DEPLOYMENT_ADMIN",
+ Id: deploymentId,
+ },
+ },
+ WorkspaceRoles: []role{
+ {
+ Role: string(iam.WORKSPACEOWNER),
+ Id: workspaceId,
+ },
+ },
+ }),
+ Check: resource.ComposeTestCheckFunc(
+ resource.TestCheckResourceAttrSet(resourceVar, "id"),
+ resource.TestCheckResourceAttr(resourceVar, "name", teamName),
+ resource.TestCheckResourceAttr(resourceVar, "description", utils.TestResourceDescription),
+ resource.TestCheckResourceAttr(resourceVar, "organization_role", string(iam.ORGANIZATIONOWNER)),
+ resource.TestCheckResourceAttr(resourceVar, "member_ids.#", "1"),
+ resource.TestCheckResourceAttr(resourceVar, "member_ids.0", userId),
+ resource.TestCheckResourceAttr(resourceVar, "deployment_roles.#", "1"),
+ resource.TestCheckResourceAttr(resourceVar, "deployment_roles.0.role", "DEPLOYMENT_ADMIN"),
+ resource.TestCheckResourceAttr(resourceVar, "deployment_roles.0.deployment_id", deploymentId),
+ resource.TestCheckResourceAttr(resourceVar, "workspace_roles.#", "1"),
+ resource.TestCheckResourceAttr(resourceVar, "workspace_roles.0.role", string(iam.WORKSPACEOWNER)),
+ resource.TestCheckResourceAttr(resourceVar, "workspace_roles.0.workspace_id", workspaceId),
+ resource.TestCheckResourceAttrSet(resourceVar, "is_idp_managed"),
+ resource.TestCheckResourceAttrSet(resourceVar, "roles_count"),
+ resource.TestCheckResourceAttrSet(resourceVar, "created_at"),
+ resource.TestCheckResourceAttrSet(resourceVar, "updated_at"),
+ resource.TestCheckResourceAttrSet(resourceVar, "created_by.id"),
+ resource.TestCheckResourceAttrSet(resourceVar, "updated_by.id"),
+ // Check via API that team exists
+ testAccCheckTeamExistence(t, teamName, true),
+ ),
+ },
+ // Update team
+ {
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + team(teamInput{
+ Name: teamName,
+ Description: "new description",
+ MemberIds: []string{},
+ OrganizationRole: string(iam.ORGANIZATIONOWNER),
+ WorkspaceRoles: []role{
+ {
+ Role: string(iam.WORKSPACEACCESSOR),
+ Id: workspaceId,
+ },
+ },
+ }),
+ Check: resource.ComposeTestCheckFunc(
+ resource.TestCheckResourceAttr(resourceVar, "description", "new description"),
+ resource.TestCheckResourceAttr(resourceVar, "member_ids.#", "0"),
+ resource.TestCheckResourceAttr(resourceVar, "workspace_roles.#", "1"),
+ resource.TestCheckResourceAttr(resourceVar, "workspace_roles.0.role", string(iam.WORKSPACEACCESSOR)),
+ resource.TestCheckResourceAttr(resourceVar, "workspace_roles.0.workspace_id", workspaceId),
+ // Check via API that team exists
+ testAccCheckTeamExistence(t, teamName, true),
+ ),
+ },
+ // Import existing team and check it is correctly imported
+ {
+ ResourceName: resourceVar,
+ ImportState: true,
+ ImportStateVerify: true,
+ ImportStateVerifyIgnore: []string{},
+ },
+ },
+ })
+}
+
+type role struct {
+ Role string
+ Id string
+}
+
+type teamInput struct {
+ Name string
+ Description string
+ MemberIds []string
+ OrganizationRole string
+ DeploymentRoles []role
+ WorkspaceRoles []role
+}
+
+func team(input teamInput) string {
+ var memberIds string
+ if len(input.MemberIds) > 0 {
+ formattedIds := lo.Map(input.MemberIds, func(id string, _ int) string {
+ return fmt.Sprintf(`"%v"`, id)
+ })
+ memberIds = fmt.Sprintf(`member_ids = [%v]`, strings.Join(formattedIds, ", "))
+ }
+
+ deploymentRoles := lo.Map(input.DeploymentRoles, func(role role, _ int) string {
+ return fmt.Sprintf(`
+ {
+ deployment_id = "%v"
+ role = "%v"
+ }`, role.Id, role.Role)
+ })
+
+ workspaceRoles := lo.Map(input.WorkspaceRoles, func(role role, _ int) string {
+ return fmt.Sprintf(`
+ {
+ workspace_id = "%v"
+ role = "%v"
+ }`, role.Id, role.Role)
+ })
+
+ var deploymentRolesStr string
+ if len(deploymentRoles) > 0 {
+ deploymentRolesStr = fmt.Sprintf("deployment_roles = [%v]", strings.Join(deploymentRoles, ","))
+ }
+
+ var workspaceRolesStr string
+ if len(workspaceRoles) > 0 {
+ workspaceRolesStr = fmt.Sprintf("workspace_roles = [%v]", strings.Join(workspaceRoles, ","))
+ }
+
+ return fmt.Sprintf(`
+resource "astro_team" "%v" {
+ name = "%v"
+ description = "%v"
+ %v
+ organization_role = "%v"
+ %v
+ %v
+}`, input.Name, input.Name, input.Description, memberIds, input.OrganizationRole, deploymentRolesStr, workspaceRolesStr)
+}
+
+func testAccCheckTeamExistence(t *testing.T, name string, shouldExist bool) func(s *terraform.State) error {
+ t.Helper()
+ return func(s *terraform.State) error {
+ client, err := utils.GetTestIamClient(true)
+ if !assert.NoError(t, err) { return fmt.Errorf("failed to get iam client: %w", err) }
+
+ organizationId := os.Getenv("HOSTED_ORGANIZATION_ID")
+
+ ctx := context.Background()
+
+ resp, err := client.ListTeamsWithResponse(ctx, organizationId, &iam.ListTeamsParams{
+ Names: &[]string{name},
+ })
+ if err != nil {
+ return fmt.Errorf("failed to list teams: %w", err)
+ }
+ if resp.JSON200 == nil {
+ status, diag := clients.NormalizeAPIError(ctx, resp.HTTPResponse, resp.Body)
+ return fmt.Errorf("response JSON200 is nil, status: %v, err: %v", status, diag.Detail())
+ }
+ if shouldExist {
+ if len(resp.JSON200.Teams) != 1 {
+ return fmt.Errorf("team %s should exist", name)
+ }
+ } else {
+ if len(resp.JSON200.Teams) != 0 {
+ return fmt.Errorf("team %s should not exist", name)
+ }
+ }
+ return nil
+ }
+}
diff --git a/internal/provider/resources/resource_workspace_test.go b/internal/provider/resources/resource_workspace_test.go
index 4f1fbbe3..0e77df87 100644
--- a/internal/provider/resources/resource_workspace_test.go
+++ b/internal/provider/resources/resource_workspace_test.go
@@ -34,7 +34,7 @@ func TestAcc_ResourceWorkspace(t *testing.T) {
),
Steps: []resource.TestStep{
{
- Config: astronomerprovider.ProviderConfig(t, true) + workspace("test", workspace1Name, "bad description", false),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + workspace("test", workspace1Name, "bad description", false),
Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr("astro_workspace.test", "name", workspace1Name),
resource.TestCheckResourceAttr("astro_workspace.test", "description", "bad description"),
@@ -45,7 +45,7 @@ func TestAcc_ResourceWorkspace(t *testing.T) {
},
// Change properties and check they have been updated in terraform state
{
- Config: astronomerprovider.ProviderConfig(t, true) + workspace("test", workspace2Name, utils.TestResourceDescription, true),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + workspace("test", workspace2Name, utils.TestResourceDescription, true),
Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr("astro_workspace.test", "name", workspace2Name),
resource.TestCheckResourceAttr("astro_workspace.test", "description", utils.TestResourceDescription),
@@ -72,7 +72,7 @@ func TestAcc_WorkspaceRemovedOutsideOfTerraform(t *testing.T) {
CheckDestroy: testAccCheckWorkspaceExistence(t, workspaceName, false),
Steps: []resource.TestStep{
{
- Config: astronomerprovider.ProviderConfig(t, true) + workspaceWithVariableName(),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + workspaceWithVariableName(),
ConfigVariables: map[string]config.Variable{
"name": config.StringVariable(workspaceName),
},
@@ -88,7 +88,7 @@ func TestAcc_WorkspaceRemovedOutsideOfTerraform(t *testing.T) {
},
{
PreConfig: func() { deleteWorkspaceOutsideOfTerraform(t, workspaceName) },
- Config: astronomerprovider.ProviderConfig(t, true) + workspaceWithVariableName(),
+ Config: astronomerprovider.ProviderConfig(t, astronomerprovider.HOSTED) + workspaceWithVariableName(),
ConfigVariables: map[string]config.Variable{
"name": config.StringVariable(workspaceName),
},
diff --git a/internal/provider/schemas/team.go b/internal/provider/schemas/team.go
index 80c1c6b1..2f7696fb 100644
--- a/internal/provider/schemas/team.go
+++ b/internal/provider/schemas/team.go
@@ -1,15 +1,22 @@
package schemas
import (
+ "github.com/astronomer/terraform-provider-astro/internal/clients/iam"
"github.com/astronomer/terraform-provider-astro/internal/provider/validators"
+ "github.com/hashicorp/terraform-plugin-framework-validators/setvalidator"
+ "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
datasourceSchema "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+ resourceSchema "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/types"
)
func TeamDataSourceSchemaAttributes() map[string]datasourceSchema.Attribute {
return map[string]datasourceSchema.Attribute{
"id": datasourceSchema.StringAttribute{
- MarkdownDescription: "Team identifier",
+ MarkdownDescription: "Team ID",
Required: true,
Validators: []validator.String{validators.IsCuid()},
},
@@ -22,11 +29,11 @@ func TeamDataSourceSchemaAttributes() map[string]datasourceSchema.Attribute {
Computed: true,
},
"is_idp_managed": datasourceSchema.BoolAttribute{
- MarkdownDescription: "Whether the team is managed by an identity provider",
+ MarkdownDescription: "Whether the Team is managed by an identity provider",
Computed: true,
},
"organization_role": datasourceSchema.StringAttribute{
- MarkdownDescription: "The role assigned to the organization",
+ MarkdownDescription: "The role assigned to the Organization",
Computed: true,
},
"workspace_roles": datasourceSchema.SetNestedAttribute{
@@ -34,17 +41,17 @@ func TeamDataSourceSchemaAttributes() map[string]datasourceSchema.Attribute {
Attributes: DataSourceWorkspaceRoleSchemaAttributes(),
},
Computed: true,
- MarkdownDescription: "The roles assigned to the workspaces",
+ MarkdownDescription: "The roles assigned to the Workspaces",
},
"deployment_roles": datasourceSchema.SetNestedAttribute{
NestedObject: datasourceSchema.NestedAttributeObject{
Attributes: DataSourceDeploymentRoleSchemaAttributes(),
},
Computed: true,
- MarkdownDescription: "The roles assigned to the deployments",
+ MarkdownDescription: "The roles assigned to the Deployments",
},
"roles_count": datasourceSchema.Int64Attribute{
- MarkdownDescription: "Number of roles assigned to the team",
+ MarkdownDescription: "Number of roles assigned to the Team",
Computed: true,
},
"created_at": datasourceSchema.StringAttribute{
@@ -67,3 +74,81 @@ func TeamDataSourceSchemaAttributes() map[string]datasourceSchema.Attribute {
},
}
}
+
+func TeamResourceSchemaAttributes() map[string]resourceSchema.Attribute {
+ return map[string]resourceSchema.Attribute{
+ "id": resourceSchema.StringAttribute{
+ MarkdownDescription: "Team ID",
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "name": resourceSchema.StringAttribute{
+ MarkdownDescription: "Team name",
+ Required: true,
+ },
+ "description": resourceSchema.StringAttribute{
+ MarkdownDescription: "Team description",
+ Optional: true,
+ },
+ "member_ids": resourceSchema.SetAttribute{
+ ElementType: types.StringType,
+ MarkdownDescription: "The IDs of the users to add to the Team",
+ Optional: true,
+ Validators: []validator.Set{
+ setvalidator.ValueStringsAre(validators.IsCuid()),
+ },
+ },
+ "is_idp_managed": resourceSchema.BoolAttribute{
+ MarkdownDescription: "Whether the Team is managed by an identity provider",
+ Computed: true,
+ },
+ "organization_role": resourceSchema.StringAttribute{
+ MarkdownDescription: "The role to assign to the Organization",
+ Required: true,
+ Validators: []validator.String{
+ stringvalidator.OneOf(string(iam.ORGANIZATIONOWNER),
+ string(iam.ORGANIZATIONMEMBER),
+ string(iam.ORGANIZATIONBILLINGADMIN),
+ ),
+ },
+ },
+ "workspace_roles": resourceSchema.SetNestedAttribute{
+ NestedObject: resourceSchema.NestedAttributeObject{
+ Attributes: ResourceWorkspaceRoleSchemaAttributes(),
+ },
+ Optional: true,
+ MarkdownDescription: "The roles to assign to the Workspaces",
+ },
+ "deployment_roles": resourceSchema.SetNestedAttribute{
+ NestedObject: resourceSchema.NestedAttributeObject{
+ Attributes: ResourceDeploymentRoleSchemaAttributes(),
+ },
+ Optional: true,
+ MarkdownDescription: "The roles to assign to the Deployments",
+ },
+ "roles_count": resourceSchema.Int64Attribute{
+ MarkdownDescription: "Number of roles assigned to the Team",
+ Computed: true,
+ },
+ "created_at": resourceSchema.StringAttribute{
+ MarkdownDescription: "Team creation timestamp",
+ Computed: true,
+ },
+ "updated_at": resourceSchema.StringAttribute{
+ MarkdownDescription: "Team last updated timestamp",
+ Computed: true,
+ },
+ "created_by": resourceSchema.SingleNestedAttribute{
+ MarkdownDescription: "Team creator",
+ Computed: true,
+ Attributes: ResourceSubjectProfileSchemaAttributes(),
+ },
+ "updated_by": resourceSchema.SingleNestedAttribute{
+ MarkdownDescription: "Team updater",
+ Computed: true,
+ Attributes: ResourceSubjectProfileSchemaAttributes(),
+ },
+ }
+}
diff --git a/internal/utils/role.go b/internal/utils/role.go
deleted file mode 100644
index ab685429..00000000
--- a/internal/utils/role.go
+++ /dev/null
@@ -1,26 +0,0 @@
-package utils
-
-import (
- "strings"
-
- "github.com/astronomer/terraform-provider-astro/internal/clients/iam"
- "github.com/samber/lo"
-)
-
-func ValidateRoleMatchesEntityType(role string, scopeType string) bool {
- organizationRoles := []string{string(iam.ORGANIZATIONBILLINGADMIN), string(iam.ORGANIZATIONMEMBER), string(iam.ORGANIZATIONOWNER)}
- workspaceRoles := []string{string(iam.WORKSPACEACCESSOR), string(iam.WORKSPACEAUTHOR), string(iam.WORKSPACEMEMBER), string(iam.WORKSPACEOWNER), string(iam.WORKSPACEOPERATOR)}
- var roles []string
-
- scopeType = strings.ToLower(scopeType)
- if scopeType == "organization" {
- roles = organizationRoles
- } else if scopeType == "workspace" {
- roles = workspaceRoles
- } else if scopeType == "deployment" {
- nonDeploymentRoles := append(organizationRoles, workspaceRoles...)
- return !lo.Contains(nonDeploymentRoles, role)
- }
-
- return lo.Contains(roles, role)
-}