diff --git a/client/model_interation.go b/client/model_integration.go similarity index 96% rename from client/model_interation.go rename to client/model_integration.go index 1fe7441..c8f8b2b 100644 --- a/client/model_interation.go +++ b/client/model_integration.go @@ -41,8 +41,8 @@ type IntegrationUpdateParam struct { // ConfigItemParam struct for ConfigItemParam type ConfigItemParam struct { - Key string `json:"key,omitempty"` - Value string `json:"value,omitempty"` + Key string `json:"key"` + Value string `json:"value"` } // PageNumResultIntegrationVO struct for PageNumResultIntegrationVO diff --git a/examples/resources/quick-start/resource.tf b/examples/resources/quick-start/resource.tf index 8b23e48..4948ff0 100644 --- a/examples/resources/quick-start/resource.tf +++ b/examples/resources/quick-start/resource.tf @@ -10,8 +10,8 @@ locals { env_id = "example" automq_byoc_host = "http://localhost:8081" - automq_byoc_access_key_id = "goiNxB8DfbbXJ85B" - automq_byoc_secret_key = "QPyEIcBXHKOBzEeeCZcpNSMRjXtj4XiS" + automq_byoc_access_key_id = "RSaIMzrFC0kAmS1x" + automq_byoc_secret_key = "msnGqOuaV5gblXPvkWfxg7Ao7Nq2iyMo" instance_deploy_region = "cn-hangzhou" instance_deploy_zone = "cn-hangzhou-b" @@ -48,11 +48,15 @@ resource "automq_kafka_instance" "example" { } ] compute_specs = { - aku = "12" + aku = "18" version = "1.1.0" } acl = true integrations = [automq_integration.example.id] + configs = { + "auto.create.topics.enable" = "false" + "log.retention.ms" = "3600000" + } } resource "automq_kafka_topic" "example" { @@ -72,7 +76,7 @@ resource "automq_kafka_user" "example" { environment_id = local.env_id kafka_instance_id = automq_kafka_instance.example.id - username = "automq_kafka_user" + username = "automq_kafka_user-1" password = "automq_kafka_user" } diff --git a/go.mod b/go.mod index 3fbfca1..dc10e52 100644 --- a/go.mod +++ b/go.mod @@ -10,11 +10,16 @@ require ( github.com/hashicorp/terraform-plugin-log v0.9.0 github.com/hashicorp/terraform-plugin-sdk/v2 
v2.34.0 github.com/hashicorp/terraform-plugin-testing v1.10.0 + github.com/stretchr/testify v1.9.0 github.com/testcontainers/testcontainers-go v0.32.0 github.com/wiremock/go-wiremock v1.9.0 ) -require github.com/hashicorp/go-retryablehttp v0.7.7 // indirect +require ( + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/hashicorp/go-retryablehttp v0.7.7 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect +) require ( dario.cat/mergo v1.0.0 // indirect diff --git a/internal/models/acl.go b/internal/models/acl.go index 1f59104..54e3e28 100644 --- a/internal/models/acl.go +++ b/internal/models/acl.go @@ -1,8 +1,10 @@ package models import ( + "fmt" "terraform-provider-automq/client" + "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/types" ) @@ -19,30 +21,42 @@ type KafkaAclResourceModel struct { Permission types.String `tfsdk:"permission"` } -func ExpandKafkaACLResource(acl KafkaAclResourceModel, request *client.KafkaAclBindingParam) { +func ExpandKafkaACLResource(acl KafkaAclResourceModel, request *client.KafkaAclBindingParam) diag.Diagnostics { request.AccessControlParam = client.KafkaControlParam{} request.ResourcePatternParam = client.KafkaResourcePatternParam{} request.AccessControlParam.OperationGroup = acl.OperationGroup.ValueString() request.AccessControlParam.PermissionType = acl.Permission.ValueString() - request.AccessControlParam.User = ParsePrincipalUser(acl.Principal.ValueString()) + user, err := ParsePrincipalUser(acl.Principal.ValueString()) + if err != nil { + return diag.Diagnostics{diag.NewErrorDiagnostic("Failed to parse principal", err.Error())} + } + request.AccessControlParam.User = user request.ResourcePatternParam.Name = acl.ResourceName.ValueString() request.ResourcePatternParam.PatternType = acl.PatternType.ValueString() request.ResourcePatternParam.ResourceType = acl.ResourceType.ValueString() + return nil } -func ParsePrincipalUser(principal string) string { - if 
condition := principal[:5]; condition == "User:" { - return principal[5:] +func ParsePrincipalUser(principal string) (string, error) { + if len(principal) < 5 { + return "", fmt.Errorf("invalid principal format: %s", principal) + } + if condition := principal[:5]; condition != "User:" { + return "", fmt.Errorf("invalid principal format: %s", principal) + } + user := principal[5:] + if user == "" { + return "", fmt.Errorf("invalid principal format: %s", principal) } - return principal + return user, nil } -func FlattenKafkaACLResource(acl *client.KafkaAclBindingVO, resource *KafkaAclResourceModel) { +func FlattenKafkaACLResource(acl *client.KafkaAclBindingVO, resource *KafkaAclResourceModel) diag.Diagnostics { aclId, err := client.GenerateAclID(*acl) if err != nil { - return + return diag.Diagnostics{diag.NewErrorDiagnostic("Failed to generate ACL ID", err.Error())} } resource.ID = types.StringValue(aclId) @@ -52,4 +66,5 @@ func FlattenKafkaACLResource(acl *client.KafkaAclBindingVO, resource *KafkaAclRe resource.Principal = types.StringValue("User:" + acl.AccessControl.User) resource.OperationGroup = types.StringValue(acl.AccessControl.OperationGroup.Name) resource.Permission = types.StringValue(acl.AccessControl.PermissionType) + return nil } diff --git a/internal/models/acl_test.go b/internal/models/acl_test.go new file mode 100644 index 0000000..1b8864d --- /dev/null +++ b/internal/models/acl_test.go @@ -0,0 +1,152 @@ +package models + +import ( + "terraform-provider-automq/client" + "testing" + + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/stretchr/testify/assert" +) + +func TestExpandKafkaACLResource(t *testing.T) { + tests := []struct { + acl KafkaAclResourceModel + expected client.KafkaAclBindingParam + }{ + { + acl: KafkaAclResourceModel{ + EnvironmentID: types.StringValue("env-123"), + KafkaInstance: types.StringValue("kafka-123"), + ID: types.StringValue("acl-123"), + ResourceType: types.StringValue("topic"), + ResourceName: 
types.StringValue("test-topic"), + PatternType: types.StringValue("literal"), + Principal: types.StringValue("User:test-user"), + OperationGroup: types.StringValue("read"), + Permission: types.StringValue("allow"), + }, + expected: client.KafkaAclBindingParam{ + AccessControlParam: client.KafkaControlParam{ + OperationGroup: "read", + PermissionType: "allow", + User: "test-user", + }, + ResourcePatternParam: client.KafkaResourcePatternParam{ + Name: "test-topic", + PatternType: "literal", + ResourceType: "topic", + }, + }, + }, + { + acl: KafkaAclResourceModel{ + Principal: types.StringValue("User:admin"), + }, + expected: client.KafkaAclBindingParam{ + AccessControlParam: client.KafkaControlParam{ + User: "admin", + }, + }, + }, + } + + for _, test := range tests { + request := &client.KafkaAclBindingParam{} + ExpandKafkaACLResource(test.acl, request) + + assert.Equal(t, test.expected.AccessControlParam.OperationGroup, request.AccessControlParam.OperationGroup) + assert.Equal(t, test.expected.AccessControlParam.PermissionType, request.AccessControlParam.PermissionType) + assert.Equal(t, test.expected.AccessControlParam.User, request.AccessControlParam.User) + assert.Equal(t, test.expected.ResourcePatternParam.Name, request.ResourcePatternParam.Name) + assert.Equal(t, test.expected.ResourcePatternParam.PatternType, request.ResourcePatternParam.PatternType) + assert.Equal(t, test.expected.ResourcePatternParam.ResourceType, request.ResourcePatternParam.ResourceType) + } +} + +func TestParsePrincipalUser(t *testing.T) { + tests := []struct { + principal string + hasDiag bool + expected string + }{ + { + principal: "User:test-user", + expected: "test-user", + hasDiag: false, + }, + { + principal: "User:admin", + expected: "admin", + hasDiag: false, + }, + { + principal: "User:admin:admin", + expected: "admin:admin", + hasDiag: false, + }, + { + principal: "User:", + expected: "", + hasDiag: true, + }, + { + principal: "User", + expected: "", + hasDiag: true, + }, + } + for 
_, test := range tests { + user, err := ParsePrincipalUser(test.principal) + + if test.hasDiag { + assert.NotNil(t, err) + } else { + assert.Nil(t, err) + assert.Equal(t, test.expected, user) + } + } +} + +func TestFlattenKafkaACLResource(t *testing.T) { + tests := []struct { + acl *client.KafkaAclBindingVO + expected KafkaAclResourceModel + }{ + { + acl: &client.KafkaAclBindingVO{ + ResourcePattern: &client.KafkaResourcePatternVO{ + ResourceType: "topic", + Name: "test-topic", + PatternType: "literal", + }, + AccessControl: &client.KafkaAccessControlVO{ + User: "test-user", + OperationGroup: client.OperationGroup{Name: "read"}, + PermissionType: "allow", + }, + }, + expected: KafkaAclResourceModel{ + ResourceType: types.StringValue("topic"), + ResourceName: types.StringValue("test-topic"), + PatternType: types.StringValue("literal"), + Principal: types.StringValue("User:test-user"), + OperationGroup: types.StringValue("read"), + Permission: types.StringValue("allow"), + }, + }, + } + + for _, test := range tests { + resource := &KafkaAclResourceModel{} + diag := FlattenKafkaACLResource(test.acl, resource) + + assert.Nil(t, diag) + + assert.Equal(t, test.expected.ResourceType.ValueString(), resource.ResourceType.ValueString()) + assert.Equal(t, test.expected.ResourceName.ValueString(), resource.ResourceName.ValueString()) + assert.Equal(t, test.expected.PatternType.ValueString(), resource.PatternType.ValueString()) + assert.Equal(t, test.expected.Principal.ValueString(), resource.Principal.ValueString()) + assert.Equal(t, test.expected.OperationGroup.ValueString(), resource.OperationGroup.ValueString()) + assert.Equal(t, test.expected.Permission.ValueString(), resource.Permission.ValueString()) + } +} diff --git a/internal/models/intergation.go b/internal/models/intergation.go index 211d79f..7fdc4f5 100644 --- a/internal/models/intergation.go +++ b/internal/models/intergation.go @@ -8,6 +8,15 @@ import ( "github.com/hashicorp/terraform-plugin-framework/types" ) 
+const ( + IntegrationTypeCloudWatch = "cloudWatch" + IntegrationTypeKafka = "kafka" + IntegrationTypePrometheus = "prometheus" + + SecurityProtocolSASLPlain = "SASL_PLAINTEXT" + SecurityProtocolPlainText = "PLAINTEXT" +) + // IntegrationResourceModel describes the resource data model. type IntegrationResourceModel struct { EnvironmentID types.String `tfsdk:"environment_id"` @@ -43,7 +52,7 @@ func ExpandIntergationResource(in *client.IntegrationParam, integration Integrat integrationType := integration.Type.ValueString() in.Name = integration.Name.ValueString() in.Type = &integrationType - if integrationType == "cloudWatch" { + if integrationType == IntegrationTypeCloudWatch { in.Name = integration.Name.ValueString() if integration.CloudWatchConfig == nil { return diag.NewErrorDiagnostic("Missing required field", "cloud_watch_config is required for CloudWatch integration") @@ -57,7 +66,7 @@ func ExpandIntergationResource(in *client.IntegrationParam, integration Integrat Value: integration.CloudWatchConfig.NameSpace.ValueString(), }, } - } else if integrationType == "kafka" { + } else if integrationType == IntegrationTypeKafka { in.Name = integration.Name.ValueString() if integration.EndPoint.IsNull() || integration.EndPoint.IsUnknown() { return diag.NewErrorDiagnostic("Missing required field", "endpoint is required for Kafka integration") @@ -69,7 +78,7 @@ func ExpandIntergationResource(in *client.IntegrationParam, integration Integrat if integration.KafkaConfig.SecurityProtocol.ValueString() == "" { return diag.NewErrorDiagnostic("Missing required field", "security_protocol is required for Kafka integration") } - if integration.KafkaConfig.SecurityProtocol.ValueString() == "SASL_PLAINTEXT" { + if integration.KafkaConfig.SecurityProtocol.ValueString() == SecurityProtocolSASLPlain { if integration.KafkaConfig.SaslMechanism.ValueString() == "" { return diag.NewErrorDiagnostic("Missing required field", "sasl_mechanism is required for Kafka integration") } @@ -97,7 
+106,7 @@ func ExpandIntergationResource(in *client.IntegrationParam, integration Integrat Value: integration.KafkaConfig.SaslPassword.ValueString(), }, } - } else if integration.KafkaConfig.SecurityProtocol.ValueString() == "PLAINTEXT" { + } else if integration.KafkaConfig.SecurityProtocol.ValueString() == SecurityProtocolPlainText { in.Config = []client.ConfigItemParam{ { Key: "security_protocol", @@ -105,7 +114,7 @@ func ExpandIntergationResource(in *client.IntegrationParam, integration Integrat }, } } - } else if integrationType == "prometheus" { + } else if integrationType == IntegrationTypePrometheus { in.Name = integration.Name.ValueString() if integration.EndPoint.IsNull() || integration.EndPoint.IsUnknown() { return diag.NewErrorDiagnostic("Missing required field", "endpoint is required for Prometheus integration") @@ -125,13 +134,13 @@ func FlattenIntergrationResource(integration *client.IntegrationVO, resource *In } func flattenIntergrationTypeConfig(iType string, config map[string]interface{}, resource *IntegrationResourceModel) { - if iType == "Kafka" { + if iType == IntegrationTypeKafka { flattenKafkaConfig(config, resource) return - } else if iType == "CloudWatch" { + } else if iType == IntegrationTypeCloudWatch { flattenCloudWatchConfig(config, resource) return - } else if iType == "Prometheus" { + } else if iType == IntegrationTypePrometheus { return } } @@ -139,7 +148,7 @@ func flattenIntergrationTypeConfig(iType string, config map[string]interface{}, func flattenKafkaConfig(config map[string]interface{}, resource *IntegrationResourceModel) { resource.KafkaConfig = &KafkaIntegrationConfig{} if v, ok := config["securityProtocol"]; ok { - resource.KafkaConfig.SaslMechanism = types.StringValue(v.(string)) + resource.KafkaConfig.SecurityProtocol = types.StringValue(v.(string)) } if v, ok := config["saslMechanism"]; ok { resource.KafkaConfig.SaslMechanism = types.StringValue(v.(string)) @@ -154,7 +163,7 @@ func flattenKafkaConfig(config 
map[string]interface{}, resource *IntegrationReso func flattenCloudWatchConfig(config map[string]interface{}, resource *IntegrationResourceModel) { resource.CloudWatchConfig = &CloudWatchIntegrationConfig{} - if v, ok := config["namespece"]; ok { + if v, ok := config["namespace"]; ok { resource.CloudWatchConfig.NameSpace = types.StringValue(v.(string)) } } diff --git a/internal/models/intergation_test.go b/internal/models/intergation_test.go new file mode 100644 index 0000000..f4d1934 --- /dev/null +++ b/internal/models/intergation_test.go @@ -0,0 +1,156 @@ +package models + +import ( + "testing" + "time" + + "terraform-provider-automq/client" + + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/stretchr/testify/assert" +) + +const ( + kafkaType = IntegrationTypeKafka + cloudWatchType = IntegrationTypeCloudWatch + + securityProtocol = "SASL_PLAINTEXT" + saslMechanism = "PLAIN" + saslUsername = "user1" + saslPassword = "pass1" + testNamespace = "test-namespace" +) + +func TestFlattenIntergationResource(t *testing.T) { + tests := []struct { + integration *client.IntegrationVO + expected IntegrationResourceModel + }{ + { + integration: &client.IntegrationVO{ + Code: "123", + Name: "test-kafka", + Type: kafkaType, + GmtCreate: time.Now(), + GmtModified: time.Now(), + Config: map[string]interface{}{ + "securityProtocol": securityProtocol, + "saslMechanism": saslMechanism, + "saslUsername": saslUsername, + "saslPassword": saslPassword, + }, + }, + expected: IntegrationResourceModel{ + ID: types.StringValue("123"), + Name: types.StringValue("test-kafka"), + Type: types.StringValue(kafkaType), + KafkaConfig: &KafkaIntegrationConfig{ + SecurityProtocol: types.StringValue(securityProtocol), + SaslMechanism: types.StringValue(saslMechanism), + SaslUsername: types.StringValue(saslUsername), + SaslPassword: types.StringValue(saslPassword), + }, + }, + }, + { + integration: &client.IntegrationVO{ + Code: "456", + Name: "test-cloudwatch", + Type: 
cloudWatchType, + GmtCreate: time.Now(), + GmtModified: time.Now(), + Config: map[string]interface{}{ + "namespace": testNamespace, + }, + }, + expected: IntegrationResourceModel{ + ID: types.StringValue("456"), + Name: types.StringValue("test-cloudwatch"), + Type: types.StringValue(cloudWatchType), + CloudWatchConfig: &CloudWatchIntegrationConfig{ + NameSpace: types.StringValue(testNamespace), + }, + }, + }, + } + + for _, test := range tests { + resource := &IntegrationResourceModel{} + FlattenIntergrationResource(test.integration, resource) + + assert.Equal(t, test.expected.ID.ValueString(), resource.ID.ValueString()) + assert.Equal(t, test.expected.Name.ValueString(), resource.Name.ValueString()) + assert.Equal(t, test.expected.Type.ValueString(), resource.Type.ValueString()) + if test.expected.KafkaConfig != nil { + assert.Equal(t, test.expected.KafkaConfig.SecurityProtocol.ValueString(), resource.KafkaConfig.SecurityProtocol.ValueString()) + assert.Equal(t, test.expected.KafkaConfig.SaslMechanism.ValueString(), resource.KafkaConfig.SaslMechanism.ValueString()) + assert.Equal(t, test.expected.KafkaConfig.SaslUsername.ValueString(), resource.KafkaConfig.SaslUsername.ValueString()) + assert.Equal(t, test.expected.KafkaConfig.SaslPassword.ValueString(), resource.KafkaConfig.SaslPassword.ValueString()) + } + if test.expected.CloudWatchConfig != nil { + assert.Equal(t, test.expected.CloudWatchConfig.NameSpace.ValueString(), resource.CloudWatchConfig.NameSpace.ValueString()) + } + } +} + +func TestExpandIntergationResource(t *testing.T) { + cloudwatch := cloudWatchType + kafka := kafkaType + + tests := []struct { + integration IntegrationResourceModel + expected client.IntegrationParam + }{ + { + integration: IntegrationResourceModel{ + Name: types.StringValue("test-kafka"), + Type: types.StringValue(kafka), + EndPoint: types.StringValue("http://localhost:9092"), + KafkaConfig: &KafkaIntegrationConfig{ + SecurityProtocol: types.StringValue("SASL_PLAINTEXT"), + 
SaslMechanism: types.StringValue("PLAIN"), + SaslUsername: types.StringValue("user1"), + SaslPassword: types.StringValue("pass1"), + }, + }, + expected: client.IntegrationParam{ + Name: "test-kafka", + Type: &kafka, + EndPoint: "http://localhost:9092", + Config: []client.ConfigItemParam{ + {Key: "security_protocol", Value: "SASL_PLAINTEXT"}, + {Key: "sasl_mechanism", Value: "PLAIN"}, + {Key: "sasl_username", Value: "user1"}, + {Key: "sasl_password", Value: "pass1"}, + }, + }, + }, + { + integration: IntegrationResourceModel{ + Name: types.StringValue("test-cloudwatch"), + Type: types.StringValue(cloudwatch), + CloudWatchConfig: &CloudWatchIntegrationConfig{ + NameSpace: types.StringValue("test-namespace"), + }, + }, + expected: client.IntegrationParam{ + Name: "test-cloudwatch", + Type: &cloudwatch, + Config: []client.ConfigItemParam{ + {Key: "namespace", Value: "test-namespace"}, + }, + }, + }, + } + + for _, test := range tests { + in := client.IntegrationParam{} + diag := ExpandIntergationResource(&in, test.integration) + + assert.Nil(t, diag) + assert.Equal(t, test.expected.Name, in.Name) + assert.Equal(t, test.expected.Type, in.Type) + assert.Equal(t, test.expected.EndPoint, in.EndPoint) + assert.Equal(t, test.expected.Config, in.Config) + } +} diff --git a/internal/models/topic.go b/internal/models/topic.go index 8a7576e..55ea579 100644 --- a/internal/models/topic.go +++ b/internal/models/topic.go @@ -1,6 +1,7 @@ package models import ( + "strings" "terraform-provider-automq/client" "github.com/hashicorp/terraform-plugin-framework/diag" @@ -30,7 +31,7 @@ func ExpandKafkaTopicResource(topic KafkaTopicResourceModel, request *client.Top Value: config.ValueString(), } if name == "cleanup.policy" { - request.CompactStrategy = config.ValueString() + request.CompactStrategy = strings.ToUpper(config.ValueString()) } i += 1 } diff --git a/internal/models/topic_test.go b/internal/models/topic_test.go new file mode 100644 index 0000000..dfef675 --- /dev/null +++ 
b/internal/models/topic_test.go @@ -0,0 +1,115 @@ +package models + +import ( + "testing" + + "terraform-provider-automq/client" + + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/stretchr/testify/assert" +) + +func TestExpandKafkaTopicResource(t *testing.T) { + config1, _ := types.MapValue(types.StringType, map[string]attr.Value{ + "delete.retention.ms": types.StringValue("86400000"), + "cleanup.policy": types.StringValue("compact"), + }) + + config2, _ := types.MapValue(types.StringType, map[string]attr.Value{ + "retention.ms": types.StringValue("3600000"), + }) + + tests := []struct { + input KafkaTopicResourceModel + expected client.TopicCreateParam + }{ + { + input: KafkaTopicResourceModel{ + EnvironmentID: types.StringValue("env-123"), + KafkaInstance: types.StringValue("kf-123"), + Name: types.StringValue("test-topic"), + Partition: types.Int64Value(3), + Configs: config1, + }, + expected: client.TopicCreateParam{ + Name: "test-topic", + Partition: 3, + CompactStrategy: "COMPACT", + Configs: []client.ConfigItemParam{ + {Key: "cleanup.policy", Value: "compact"}, + {Key: "delete.retention.ms", Value: "86400000"}, + }, + }, + }, + { + input: KafkaTopicResourceModel{ + EnvironmentID: types.StringValue("env-456"), + KafkaInstance: types.StringValue("kf-456"), + Name: types.StringValue("another-topic"), + Partition: types.Int64Value(1), + Configs: config2, + }, + expected: client.TopicCreateParam{ + Name: "another-topic", + Partition: 1, + CompactStrategy: "DELETE", + Configs: []client.ConfigItemParam{ + {Key: "retention.ms", Value: "3600000"}, + }, + }, + }, + } + + for _, test := range tests { + request := &client.TopicCreateParam{} + ExpandKafkaTopicResource(test.input, request) + + assert.Equal(t, test.expected.Name, request.Name) + assert.Equal(t, test.expected.Partition, request.Partition) + assert.Equal(t, test.expected.CompactStrategy, request.CompactStrategy) + 
assert.ElementsMatch(t, test.expected.Configs, request.Configs) + } +} + +func TestFlattenKafkaTopic(t *testing.T) { + tests := []struct { + input *client.TopicVO + expected KafkaTopicResourceModel + }{ + { + input: &client.TopicVO{ + TopicId: "topic-123", + Name: "test-topic", + Partition: 3, + }, + expected: KafkaTopicResourceModel{ + TopicID: types.StringValue("topic-123"), + Name: types.StringValue("test-topic"), + Partition: types.Int64Value(3), + }, + }, + { + input: &client.TopicVO{ + TopicId: "topic-456", + Name: "another-topic", + Partition: 1, + }, + expected: KafkaTopicResourceModel{ + TopicID: types.StringValue("topic-456"), + Name: types.StringValue("another-topic"), + Partition: types.Int64Value(1), + }, + }, + } + + for _, test := range tests { + resource := &KafkaTopicResourceModel{} + diag := FlattenKafkaTopic(test.input, resource) + + assert.Nil(t, diag) + assert.Equal(t, test.expected.TopicID.ValueString(), resource.TopicID.ValueString()) + assert.Equal(t, test.expected.Name.ValueString(), resource.Name.ValueString()) + assert.Equal(t, test.expected.Partition.ValueInt64(), resource.Partition.ValueInt64()) + } +} diff --git a/internal/models/util_test.go b/internal/models/util_test.go new file mode 100644 index 0000000..80f4d60 --- /dev/null +++ b/internal/models/util_test.go @@ -0,0 +1,63 @@ +package models + +import ( + "testing" + + "terraform-provider-automq/client" + + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/stretchr/testify/assert" +) + +func TestCreateConfigFromMapValue(t *testing.T) { + planConfig := types.MapValueMust(types.StringType, map[string]attr.Value{ + "key1": types.StringValue("value1"), + "key2": types.StringValue("value2"), + }) + + expected := []client.ConfigItemParam{ + {Key: "key1", Value: "value1"}, + {Key: "key2", Value: "value2"}, + } + + result := CreateConfigFromMapValue(planConfig) + + assert.ElementsMatch(t, expected, result) +} + 
+func TestCreateMapFromConfigValue(t *testing.T) { + configs := []client.ConfigItemParam{ + {Key: "key1", Value: "value1"}, + {Key: "key2", Value: "value2"}, + } + + expected := types.MapValueMust(types.StringType, map[string]attr.Value{ + "key1": types.StringValue("value1"), + "key2": types.StringValue("value2"), + }) + + result := CreateMapFromConfigValue(configs) + + assert.True(t, MapsEqual(expected, result)) +} + +func TestMapsEqual(t *testing.T) { + map1 := types.MapValueMust(types.StringType, map[string]attr.Value{ + "key1": types.StringValue("value1"), + "key2": types.StringValue("value2"), + }) + + map2 := types.MapValueMust(types.StringType, map[string]attr.Value{ + "key1": types.StringValue("value1"), + "key2": types.StringValue("value2"), + }) + + map3 := types.MapValueMust(types.StringType, map[string]attr.Value{ + "key1": types.StringValue("value1"), + "key2": types.StringValue("different_value"), + }) + + assert.True(t, MapsEqual(map1, map2)) + assert.False(t, MapsEqual(map1, map3)) +} diff --git a/internal/provider/resource_acl.go b/internal/provider/resource_acl.go index d5a3b74..906e914 100644 --- a/internal/provider/resource_acl.go +++ b/internal/provider/resource_acl.go @@ -142,7 +142,10 @@ func (r *KafkaAclResource) Create(ctx context.Context, req resource.CreateReques return } // flatten the response and set the ID to the state - models.FlattenKafkaACLResource(out, &plan) + resp.Diagnostics.Append(models.FlattenKafkaACLResource(out, &plan)...) + if resp.Diagnostics.HasError() { + return + } tflog.Trace(ctx, "created a Kafka ACL resource") resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...) @@ -167,7 +170,10 @@ func (r *KafkaAclResource) Read(ctx context.Context, req resource.ReadRequest, r return } // flatten the response and set the state - models.FlattenKafkaACLResource(out, &state) + resp.Diagnostics.Append(models.FlattenKafkaACLResource(out, &state)...) 
+ if resp.Diagnostics.HasError() { + return + } resp.Diagnostics.Append(resp.State.Set(ctx, &state)...) }