diff --git a/.genignore b/.genignore
new file mode 100644
index 0000000..bcfbc57
--- /dev/null
+++ b/.genignore
@@ -0,0 +1 @@
+/internal/provider/pipeline_resource.go
diff --git a/.speakeasy/gen.yaml b/.speakeasy/gen.yaml
index 9e61a1f..27f0fca 100755
--- a/.speakeasy/gen.yaml
+++ b/.speakeasy/gen.yaml
@@ -9,7 +9,7 @@ generation:
     nameResolutionDec2023: true
     parameterOrderingFeb2024: false
 terraform:
-  version: 0.1.1
+  version: 0.1.2
   author: etleap
   imports:
     option: openapi
diff --git a/internal/provider/pipeline_resource.go b/internal/provider/pipeline_resource.go
index ddb6046..19b164f 100644
--- a/internal/provider/pipeline_resource.go
+++ b/internal/provider/pipeline_resource.go
@@ -1,10 +1,11 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+// Code originally generated by Speakeasy (https://www.speakeasyapi.dev).
 
 package provider
 
 import (
 	"context"
 	"fmt"
 
+	speakeasy_boolplanmodifier "github.com/etleap/terraform-provider-etleap/internal/planmodifiers/boolplanmodifier"
 	speakeasy_int64planmodifier "github.com/etleap/terraform-provider-etleap/internal/planmodifiers/int64planmodifier"
 	speakeasy_listplanmodifier "github.com/etleap/terraform-provider-etleap/internal/planmodifiers/listplanmodifier"
@@ -13,6 +14,7 @@ import (
 	speakeasy_stringplanmodifier "github.com/etleap/terraform-provider-etleap/internal/planmodifiers/stringplanmodifier"
 	"github.com/etleap/terraform-provider-etleap/internal/sdk"
 	"github.com/etleap/terraform-provider-etleap/internal/sdk/pkg/models/operations"
+	"github.com/etleap/terraform-provider-etleap/internal/sdk/pkg/models/shared"
 	"github.com/etleap/terraform-provider-etleap/internal/validators"
 	speakeasy_listvalidators "github.com/etleap/terraform-provider-etleap/internal/validators/listvalidators"
 	speakeasy_numbervalidators "github.com/etleap/terraform-provider-etleap/internal/validators/numbervalidators"
@@ -99,25 +101,19 @@ func (r *PipelineResource) Schema(ctx context.Context, req resource.SchemaReques
 				Description: `Specifies whether any remaining export products in the destination created by this pipeline should be deleted. For REDSHIFT and SNOWFLAKE destinations this means tables, and for S3 DATA LAKE destinations this means data output to S3 as well as any tables created in Glue. Defaults to ` + "`" + `false` + "`" + `. Default: false`,
 			},
 			"destination": schema.SingleNestedAttribute{
-				PlanModifiers: []planmodifier.Object{
-					objectplanmodifier.RequiresReplaceIfConfigured(),
-				},
-				Required: true,
+				PlanModifiers: []planmodifier.Object{},
+				Required:      true,
 				Attributes: map[string]schema.Attribute{
 					"delta_lake": schema.SingleNestedAttribute{
-						PlanModifiers: []planmodifier.Object{
-							objectplanmodifier.RequiresReplaceIfConfigured(),
-						},
-						Optional: true,
+						PlanModifiers: []planmodifier.Object{},
+						Optional:      true,
 						Attributes: map[string]schema.Attribute{
 							"automatic_schema_changes": schema.BoolAttribute{
-								Computed: true,
-								PlanModifiers: []planmodifier.Bool{
-									boolplanmodifier.RequiresReplaceIfConfigured(),
-								},
-								Optional:    true,
-								Default:     booldefault.StaticBool(true),
-								Description: `Whether schema changes detected during transformation should be handled automatically or not. Defaults to ` + "`" + `true` + "`" + `. Requires replacement if changed. ; Default: true`,
+								Computed:      true,
+								PlanModifiers: []planmodifier.Bool{},
+								Optional:      true,
+								Default:       booldefault.StaticBool(true),
+								Description:   `Whether schema changes detected during transformation should be handled automatically or not. Defaults to ` + "`" + `true` + "`" + `. Requires replacement if changed. ; Default: true`,
 							},
 							"connection_id": schema.StringAttribute{
 								PlanModifiers: []planmodifier.String{
@@ -208,19 +204,15 @@ func (r *PipelineResource) Schema(ctx context.Context, req resource.SchemaReques
 						Description: `Requires replacement if changed. `,
 					},
 					"redshift": schema.SingleNestedAttribute{
-						PlanModifiers: []planmodifier.Object{
-							objectplanmodifier.RequiresReplaceIfConfigured(),
-						},
-						Optional: true,
+						PlanModifiers: []planmodifier.Object{},
+						Optional:      true,
 						Attributes: map[string]schema.Attribute{
 							"automatic_schema_changes": schema.BoolAttribute{
-								Computed: true,
-								PlanModifiers: []planmodifier.Bool{
-									boolplanmodifier.RequiresReplaceIfConfigured(),
-								},
-								Optional:    true,
-								Default:     booldefault.StaticBool(true),
-								Description: `Whether schema changes detected during transformation should be handled automatically or not. Defaults to ` + "`" + `true` + "`" + `. Requires replacement if changed. ; Default: true`,
+								Computed:      true,
+								PlanModifiers: []planmodifier.Bool{},
+								Optional:      true,
+								Default:       booldefault.StaticBool(true),
+								Description:   `Whether schema changes detected during transformation should be handled automatically or not. Defaults to ` + "`" + `true` + "`" + `. Requires replacement if changed. ; Default: true`,
 							},
 							"compress_columns": schema.BoolAttribute{
 								Computed: true,
@@ -378,19 +370,15 @@ func (r *PipelineResource) Schema(ctx context.Context, req resource.SchemaReques
 						Description: `Requires replacement if changed. `,
 					},
 					"s3_data_lake": schema.SingleNestedAttribute{
-						PlanModifiers: []planmodifier.Object{
-							objectplanmodifier.RequiresReplaceIfConfigured(),
-						},
-						Optional: true,
+						PlanModifiers: []planmodifier.Object{},
+						Optional:      true,
 						Attributes: map[string]schema.Attribute{
 							"automatic_schema_changes": schema.BoolAttribute{
-								Computed: true,
-								PlanModifiers: []planmodifier.Bool{
-									boolplanmodifier.RequiresReplaceIfConfigured(),
-								},
-								Optional:    true,
-								Default:     booldefault.StaticBool(true),
-								Description: `Whether schema changes detected during transformation should be handled automatically or not. Defaults to ` + "`" + `true` + "`" + `. Requires replacement if changed. ; Default: true`,
+								Computed:      true,
+								PlanModifiers: []planmodifier.Bool{},
+								Optional:      true,
+								Default:       booldefault.StaticBool(true),
+								Description:   `Whether schema changes detected during transformation should be handled automatically or not. Defaults to ` + "`" + `true` + "`" + `. Requires replacement if changed. ; Default: true`,
 							},
 							"connection_id": schema.StringAttribute{
 								PlanModifiers: []planmodifier.String{
@@ -466,19 +454,15 @@ func (r *PipelineResource) Schema(ctx context.Context, req resource.SchemaReques
 						Description: `Requires replacement if changed. `,
 					},
 					"snowflake": schema.SingleNestedAttribute{
-						PlanModifiers: []planmodifier.Object{
-							objectplanmodifier.RequiresReplaceIfConfigured(),
-						},
-						Optional: true,
+						PlanModifiers: []planmodifier.Object{},
+						Optional:      true,
 						Attributes: map[string]schema.Attribute{
 							"automatic_schema_changes": schema.BoolAttribute{
-								Computed: true,
-								PlanModifiers: []planmodifier.Bool{
-									boolplanmodifier.RequiresReplaceIfConfigured(),
-								},
-								Optional:    true,
-								Default:     booldefault.StaticBool(true),
-								Description: `Whether schema changes detected during transformation should be handled automatically or not. Defaults to ` + "`" + `true` + "`" + `. Requires replacement if changed. ; Default: true`,
+								Computed:      true,
+								PlanModifiers: []planmodifier.Bool{},
+								Optional:      true,
+								Default:       booldefault.StaticBool(true),
+								Description:   `Whether schema changes detected during transformation should be handled automatically or not. Defaults to ` + "`" + `true` + "`" + `. Requires replacement if changed. ; Default: true`,
 							},
 							"clustering_keys": schema.ListAttribute{
 								PlanModifiers: []planmodifier.List{
@@ -8603,6 +8587,14 @@ func (r *PipelineResource) Create(ctx context.Context, req resource.CreateReques
 	data.RefreshFromSharedPipelineOutput(res1.PipelineOutput)
 	refreshPlan(ctx, plan, &data, resp.Diagnostics)
 
+	existingDestination := data.Destination.Redshift
+	if existingDestination == nil {
+		data.Destination.Redshift = data.Destinations[0].Destination.Redshift
+		data.Destination.Snowflake = data.Destinations[0].Destination.Snowflake
+		data.Destination.DeltaLake = data.Destinations[0].Destination.DeltaLake
+		data.Destination.S3DataLake = data.Destinations[0].Destination.S3DataLake
+	}
+
 	// Save updated data into Terraform state
 	resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
 }
@@ -8651,6 +8643,14 @@ func (r *PipelineResource) Read(ctx context.Context, req resource.ReadRequest, r
 	}
 	data.RefreshFromSharedPipelineOutput(res.PipelineOutput)
 
+	existingDestination := data.Destination.Redshift
+	if existingDestination == nil {
+		data.Destination.Redshift = data.Destinations[0].Destination.Redshift
+		data.Destination.Snowflake = data.Destinations[0].Destination.Snowflake
+		data.Destination.DeltaLake = data.Destinations[0].Destination.DeltaLake
+		data.Destination.S3DataLake = data.Destinations[0].Destination.S3DataLake
+	}
+
 	// Save updated data into Terraform state
 	resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
 }
@@ -8671,6 +8671,15 @@ func (r *PipelineResource) Update(ctx context.Context, req resource.UpdateReques
 	id := data.ID.ValueString()
 	pipelineUpdate := *data.ToSharedPipelineUpdate()
+
+	schemaChanges := data.Destination.Redshift.AutomaticSchemaChanges.ValueBool()
+	connectionId := data.Destination.Redshift.ConnectionID.ValueString()
+	var destUpdate *shared.DestinationUpdate = &shared.DestinationUpdate{
+		ConnectionID:           connectionId,
+		AutomaticSchemaChanges: &schemaChanges,
+	}
+	pipelineUpdate.DestinationUpdate = []shared.DestinationUpdate{*destUpdate}
+
 	request := operations.UpdatePipelineRequest{
 		ID:             id,
 		PipelineUpdate: pipelineUpdate,
 	}
@@ -8724,6 +8733,14 @@ func (r *PipelineResource) Update(ctx context.Context, req resource.UpdateReques
 	data.RefreshFromSharedPipelineOutput(res1.PipelineOutput)
 	refreshPlan(ctx, plan, &data, resp.Diagnostics)
 
+	existingDestination := data.Destination.Redshift
+	if existingDestination == nil {
+		data.Destination.Redshift = data.Destinations[0].Destination.Redshift
+		data.Destination.Snowflake = data.Destinations[0].Destination.Snowflake
+		data.Destination.DeltaLake = data.Destinations[0].Destination.DeltaLake
+		data.Destination.S3DataLake = data.Destinations[0].Destination.S3DataLake
+	}
+
 	// Save updated data into Terraform state
 	resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
 }
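
The destination backfill block above appears verbatim in Create, Read, and Update. Below is a minimal sketch of that pattern factored into a single helper, for illustration only: the helper name backfillDestination and the trimmed-down model types are hypothetical stand-ins for the provider's generated structs, and the patch itself keeps the three inline copies.

package main

// Simplified stand-ins for the generated model types in internal/provider;
// the real structs carry many more fields and terraform-plugin-framework types.
type destinationModel struct {
	Redshift   *struct{}
	Snowflake  *struct{}
	DeltaLake  *struct{}
	S3DataLake *struct{}
}

type destinationItem struct {
	Destination destinationModel
}

type pipelineResourceModel struct {
	Destination  destinationModel
	Destinations []destinationItem
}

// backfillDestination (hypothetical name) mirrors the block the patch adds to
// Create, Read, and Update: after refreshing state from the API response, if
// the singular `destination` attribute came back unset, copy all four
// warehouse variants from the first element of `destinations`.
func backfillDestination(data *pipelineResourceModel) {
	if data.Destination.Redshift != nil {
		return // same guard as the patch: only the Redshift variant is checked
	}
	if len(data.Destinations) == 0 {
		return // extra nil-safety guard; the patch indexes Destinations[0] directly
	}
	first := data.Destinations[0].Destination
	data.Destination.Redshift = first.Redshift
	data.Destination.Snowflake = first.Snowflake
	data.Destination.DeltaLake = first.DeltaLake
	data.Destination.S3DataLake = first.S3DataLake
}

func main() {
	data := &pipelineResourceModel{
		Destinations: []destinationItem{{Destination: destinationModel{Snowflake: &struct{}{}}}},
	}
	backfillDestination(data)
	// data.Destination.Snowflake is now populated from destinations[0].
}

Note that, as in the patch, the guard inspects only the Redshift variant before copying all four, so a pipeline whose destination is Snowflake, Delta Lake, or S3 Data Lake always takes the backfill path.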