Commit b4ef915

Merge remote-tracking branch 'origin/main' into yifei_gen_docs

cyifei2023 committed Aug 15, 2024
2 parents 3520c49 + 2b8ce3b

Showing 23 changed files with 1,023 additions and 74 deletions.
41 changes: 39 additions & 2 deletions client/api_kafka_instance.go
@@ -8,14 +8,14 @@ import (

 const (
     InstancePath                   = "/api/v1/instances"
+    InstanceConfigPath             = "/api/v1/instances/%s/configurations"
     GetInstancePath                = "/api/v1/instances/%s"
     DeleteInstancePath             = "/api/v1/instances/%s"
     ReplaceInstanceIntergationPath = "/api/v1/instances/%s/integrations"
     TurnOnInstanceAclPath          = "/api/v1/instances/%s/acls:enable"
     GetInstanceEndpointsPath       = "/api/v1/instances/%s/endpoints"
     UpdateInstanceBasicInfoPath    = "/api/v1/instances/%s/basic"
     UpdateInstanceVersionPath      = "/api/v1/instances/%s/versions/%s"
-    UpdateInstanceConfigPath       = "/api/v1/instances/%s/configurations"
     UpdateInstanceComputeSpecsPath = "/api/v1/instances/%s/spec"
 )
@@ -45,6 +45,29 @@ func (c *Client) GetKafkaInstance(ctx context.Context, instanceId string) (*Kafk
     return &instance, nil
 }

+func (c *Client) GetKafkaInstanceByName(ctx context.Context, name string) (*KafkaInstanceResponse, error) {
+    queryParams := make(map[string]string)
+    queryParams["keyword"] = name
+    body, err := c.Get(ctx, InstancePath, queryParams)
+    if err != nil {
+        return nil, err
+    }
+    instances := KafkaInstanceResponseList{}
+    err = json.Unmarshal(body, &instances)
+    if err != nil {
+        return nil, err
+    }
+    if len(instances.List) > 0 {
+        for _, item := range instances.List {
+            if item.DisplayName == name {
+                return &item, nil
+            }
+        }
+        return nil, &ErrorResponse{Code: 404, ErrorMessage: "kafka instance not found"}
+    }
+    return nil, &ErrorResponse{Code: 404, ErrorMessage: "kafka instance not found"}
+}
+
 func (c *Client) DeleteKafkaInstance(ctx context.Context, instanceId string) error {
     _, err := c.Delete(ctx, fmt.Sprintf(DeleteInstancePath, instanceId))
     if err != nil {
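The helper added above issues a keyword-filtered list call, then insists on an exact DisplayName match client-side, so near-matches still come back as a 404-coded error. A minimal caller sketch, not part of this commit — the endpoint, empty credentials, and instance name are placeholders:

package main

import (
    "context"
    "fmt"
    "log"

    "terraform-provider-automq/client"
)

func main() {
    ctx := context.Background()
    // Placeholder endpoint and credentials for a deployed BYOC environment.
    c, err := client.NewClient(ctx, "http://localhost:8081", client.AuthCredentials{})
    if err != nil {
        log.Fatal(err)
    }
    // Both "no results" and "results without an exact name match" surface as
    // a 404-coded *client.ErrorResponse ("kafka instance not found").
    inst, err := c.GetKafkaInstanceByName(ctx, "automq-example-1")
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println("found instance:", inst.DisplayName)
}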
@@ -85,6 +108,20 @@ func (c *Client) GetInstanceEndpoints(ctx context.Context, instanceId string) ([
     return endpoints.List, nil
 }

+func (c *Client) GetInstanceConfigs(ctx context.Context, instanceId string) ([]ConfigItemParam, error) {
+    path := fmt.Sprintf(InstanceConfigPath, instanceId)
+    body, err := c.Get(ctx, path, nil)
+    if err != nil {
+        return nil, err
+    }
+    instance := PageNumResultConfigItemVO{}
+    err = json.Unmarshal(body, &instance)
+    if err != nil {
+        return nil, err
+    }
+    return instance.List, nil
+}
+
 func (c *Client) UpdateKafkaInstanceBasicInfo(ctx context.Context, instanceId string, updateParam InstanceBasicParam) (*KafkaInstanceResponse, error) {
     return c.updateInstance(ctx, instanceId, updateParam, UpdateInstanceBasicInfoPath)
 }
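GetInstanceConfigs, also new in this file, decodes the paged PageNumResultConfigItemVO response and hands back just its List, so callers see a flat slice of key/value pairs. A short sketch of iterating the result, assuming a *client.Client built as in the previous example; the instance ID is a placeholder:

// printInstanceConfigs dumps an instance's configuration entries.
func printInstanceConfigs(ctx context.Context, c *client.Client) error {
    configs, err := c.GetInstanceConfigs(ctx, "kf-example")
    if err != nil {
        return err
    }
    for _, item := range configs {
        fmt.Printf("%s = %s\n", item.Key, item.Value)
    }
    return nil
}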
@@ -95,7 +132,7 @@ func (c *Client) UpdateKafkaInstanceVersion(ctx context.Context, instanceId stri
 }

 func (c *Client) UpdateKafkaInstanceConfig(ctx context.Context, instanceId string, updateParam InstanceConfigParam) (*KafkaInstanceResponse, error) {
-    return c.updateInstance(ctx, instanceId, updateParam, UpdateInstanceConfigPath)
+    return c.updateInstance(ctx, instanceId, updateParam, InstanceConfigPath)
 }

 func (c *Client) UpdateKafkaInstanceComputeSpecs(ctx context.Context, instanceId string, updateParam SpecificationUpdateParam) (*KafkaInstanceResponse, error) {
2 changes: 1 addition & 1 deletion client/client.go
@@ -46,7 +46,7 @@ func (e *ErrorResponse) Error() string {
     if e.APIError.ErrorModel.Code != "" {
         return fmt.Sprintf("Error %d: %s: %s", e.Code, e.APIError.ErrorModel.Code, e.APIError.ErrorModel.Message)
     }
-    return fmt.Sprintf("Error %d: %s, detail: %s", e.Code, e.ErrorMessage, e.Err.Error())
+    return fmt.Sprintf("Error Code %d: %s", e.Code, e.ErrorMessage)
 }

 func NewClient(ctx context.Context, host string, credentials AuthCredentials) (*Client, error) {
4 changes: 2 additions & 2 deletions client/model_interation.go → client/model_integration.go
@@ -41,8 +41,8 @@ type IntegrationUpdateParam struct {

 // ConfigItemParam struct for ConfigItemParam
 type ConfigItemParam struct {
-    Key   string `json:"key,omitempty"`
-    Value string `json:"value,omitempty"`
+    Key   string `json:"key"`
+    Value string `json:"value"`
 }

 // PageNumResultIntegrationVO struct for PageNumResultIntegrationVO
9 changes: 9 additions & 0 deletions client/model_kafka_instance.go
@@ -124,3 +124,12 @@ type InstanceAccessInfoVO struct {
     Mechanisms       string `json:"mechanisms"`
     BootstrapServers string `json:"bootstrapServers"`
 }
+
+// PageNumResultConfigItemVO struct for PageNumResultConfigItemVO
+type PageNumResultConfigItemVO struct {
+    PageNum   *int32            `json:"pageNum,omitempty"`
+    PageSize  *int32            `json:"pageSize,omitempty"`
+    Total     *int64            `json:"total,omitempty"`
+    List      []ConfigItemParam `json:"list,omitempty"`
+    TotalPage *int64            `json:"totalPage,omitempty"`
+}
69 changes: 69 additions & 0 deletions docs/data-sources/kafka_instance.md
@@ -0,0 +1,69 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "automq_kafka_instance Data Source - automq"
+subcategory: ""
+description: |-
+  AutoMQ Kafka instance resource
+---
+
+# automq_kafka_instance (Data Source)
+
+AutoMQ Kafka instance resource
+
+
+
+<!-- schema generated by tfplugindocs -->
+## Schema
+
+### Required
+
+- `environment_id` (String) Target Kafka environment
+
+### Optional
+
+- `id` (String) The ID of the Kafka instance
+- `name` (String) The name of the Kafka instance
+
+### Read-Only
+
+- `acl` (Boolean) The ACL of the Kafka instance
+- `cloud_provider` (String) The cloud provider of the Kafka instance
+- `compute_specs` (Attributes) The compute specs of the Kafka instance (see [below for nested schema](#nestedatt--compute_specs))
+- `configs` (Map of String) Additional configuration for the Kafka topic
+- `created_at` (String)
+- `description` (String) The description of the Kafka instance
+- `endpoints` (Attributes List) The endpoints of the Kafka instance (see [below for nested schema](#nestedatt--endpoints))
+- `instance_status` (String) The status of the Kafka instance
+- `integrations` (List of String) The integrations of the Kafka instance
+- `last_updated` (String)
+- `networks` (Attributes List) The networks of the Kafka instance (see [below for nested schema](#nestedatt--networks))
+- `region` (String) The region of the Kafka instance
+
+<a id="nestedatt--compute_specs"></a>
+### Nested Schema for `compute_specs`
+
+Read-Only:
+
+- `aku` (Number) The template of the compute specs
+- `version` (String) The version of the compute specs
+
+
+<a id="nestedatt--endpoints"></a>
+### Nested Schema for `endpoints`
+
+Read-Only:
+
+- `bootstrap_servers` (String) The bootstrap servers of the endpoint
+- `display_name` (String) The display name of the endpoint
+- `mechanisms` (String) The mechanisms of the endpoint
+- `network_type` (String) The network type of the endpoint
+- `protocol` (String) The protocol of the endpoint
+
+
+<a id="nestedatt--networks"></a>
+### Nested Schema for `networks`
+
+Read-Only:
+
+- `subnets` (List of String) The subnets of the network
+- `zone` (String) The zone of the network
26 changes: 2 additions & 24 deletions docs/index.md
@@ -1,27 +1,5 @@
 ---
-page_title: "Provider: Redpanda"
+page_title: "Provider: AutoMQ"
 description: |-
-  The Redpanda Data Terraform provider is used to manage Redpanda Dedicated and Cloud clusters and Kafka resources within them. To connect to a Redpanda Cloud cluster, a client_id and client_secret are required.
 ---
-
-# Redpanda Provider
-
-!!! THIS IS AN ALPHA RELEASE !!!
-
-Please be aware that all features are subject to change and may not be fully supported at this time.
-
-The Redpanda provider is designed for managing Redpanda clusters and Kafka resources in Redpanda Dedicated and Cloud environments. It supports the provisioning, management, and configuration of clusters and Kafka resources, facilitating seamless integration into Terraform workflows.
-
-<!-- schema generated by tfplugindocs -->
-## Schema
-
-### Optional
-
-- `automq_byoc_access_key_id` (String) Set the Access Key Id of client. AutoMQ Cloud (BYOC) requires Access Keys to manage access and authentication to different parts of the service. An Access Key consists of an access key id and a secret key. You can create and manage Access Keys by using the AutoMQ Cloud BYOC Console. Learn more about AutoMQ Cloud BYOC Console access [here](https://docs.automq.com/automq-cloud/manage-identities-and-access).
-- `automq_byoc_host` (String) Set the AutoMQ BYOC environment endpoint. The endpoint like http://{hostname}:8080. You can get this endpoint when deploy environment complete.
-- `automq_byoc_secret_key` (String) Set the Secret Access Key of client. AutoMQ Cloud (BYOC) requires Access Keys to manage access and authentication to different parts of the service. An Access Key consists of an access key id and a secret key. You can create and manage Access Keys by using the AutoMQ Cloud BYOC Console. Learn more about AutoMQ Cloud BYOC Console access [here](https://docs.automq.com/automq-cloud/manage-identities-and-access).
-
-## Authentication with Redpanda Cloud
-
-This provider requires a `client_id` and `client_secret` for authentication with Redpanda Cloud services, enabling users to securely manage their Redpanda resources. You can get these by creating an account in [Redpanda Cloud](https://cloudv2.redpanda.com/home) and then [creating a client in the Redpanda Cloud UI](https://cloudv2.redpanda.com/clients).
+The AutoMQ Terraform provider is used to manage AutoMQ Cloud BYOC and SaaS instances and Kafka resources within them.
32 changes: 32 additions & 0 deletions examples/data-sources/automq_kafka_instance/data-sources.tf
@@ -0,0 +1,32 @@
+terraform {
+  required_providers {
+    automq = {
+      source = "hashicorp.com/edu/automq"
+    }
+  }
+}
+
+
+locals {
+  env_id = "example"
+
+  automq_byoc_host          = "http://localhost:8081"
+  automq_byoc_access_key_id = "RSaIMzrFC0kAmS1x"
+  automq_byoc_secret_key    = "msnGqOuaV5gblXPvkWfxg7Ao7Nq2iyMo"
+}
+
+
+provider "automq" {
+  automq_byoc_host          = local.automq_byoc_host
+  automq_byoc_access_key_id = local.automq_byoc_access_key_id
+  automq_byoc_secret_key    = local.automq_byoc_secret_key
+}
+
+data "automq_kafka_instance" "example" {
+  environment_id = local.env_id
+  name           = "automq-example-1"
+}
+
+output "example-id" {
+  value = data.automq_kafka_instance.example.id
+}
12 changes: 8 additions & 4 deletions examples/resources/quick-start/resource.tf
@@ -10,8 +10,8 @@ locals {
   env_id = "example"

   automq_byoc_host          = "http://localhost:8081"
-  automq_byoc_access_key_id = "goiNxB8DfbbXJ85B"
-  automq_byoc_secret_key    = "QPyEIcBXHKOBzEeeCZcpNSMRjXtj4XiS"
+  automq_byoc_access_key_id = "RSaIMzrFC0kAmS1x"
+  automq_byoc_secret_key    = "msnGqOuaV5gblXPvkWfxg7Ao7Nq2iyMo"

   instance_deploy_region = "cn-hangzhou"
   instance_deploy_zone   = "cn-hangzhou-b"
@@ -48,11 +48,15 @@ resource "automq_kafka_instance" "example" {
     }
   ]
   compute_specs = {
-    aku     = "12"
+    aku     = "18"
     version = "1.1.0"
   }
   acl          = true
   integrations = [automq_integration.example.id]
+  configs = {
+    "auto.create.topics.enable" = "false"
+    "log.retention.ms"          = "3600000"
+  }
 }

 resource "automq_kafka_topic" "example" {
@@ -72,7 +76,7 @@ resource "automq_kafka_user" "example" {
   environment_id = local.env_id

   kafka_instance_id = automq_kafka_instance.example.id
-  username          = "automq_kafka_user"
+  username          = "automq_kafka_user-1"
   password          = "automq_kafka_user"
 }

7 changes: 6 additions & 1 deletion go.mod
@@ -10,11 +10,16 @@ require (
     github.com/hashicorp/terraform-plugin-log v0.9.0
     github.com/hashicorp/terraform-plugin-sdk/v2 v2.34.0
     github.com/hashicorp/terraform-plugin-testing v1.10.0
+    github.com/stretchr/testify v1.9.0
     github.com/testcontainers/testcontainers-go v0.32.0
     github.com/wiremock/go-wiremock v1.9.0
 )

-require github.com/hashicorp/go-retryablehttp v0.7.7 // indirect
+require (
+    github.com/davecgh/go-spew v1.1.1 // indirect
+    github.com/hashicorp/go-retryablehttp v0.7.7 // indirect
+    github.com/pmezard/go-difflib v1.0.0 // indirect
+)

 require (
     dario.cat/mergo v1.0.0 // indirect
31 changes: 23 additions & 8 deletions internal/models/acl.go
@@ -1,8 +1,10 @@
 package models

 import (
+    "fmt"
     "terraform-provider-automq/client"

+    "github.com/hashicorp/terraform-plugin-framework/diag"
     "github.com/hashicorp/terraform-plugin-framework/types"
 )

@@ -19,30 +21,42 @@
     Permission types.String `tfsdk:"permission"`
 }

-func ExpandKafkaACLResource(acl KafkaAclResourceModel, request *client.KafkaAclBindingParam) {
+func ExpandKafkaACLResource(acl KafkaAclResourceModel, request *client.KafkaAclBindingParam) diag.Diagnostics {
     request.AccessControlParam = client.KafkaControlParam{}
     request.ResourcePatternParam = client.KafkaResourcePatternParam{}
     request.AccessControlParam.OperationGroup = acl.OperationGroup.ValueString()
     request.AccessControlParam.PermissionType = acl.Permission.ValueString()

-    request.AccessControlParam.User = ParsePrincipalUser(acl.Principal.ValueString())
+    user, err := ParsePrincipalUser(acl.Principal.ValueString())
+    if err != nil {
+        return diag.Diagnostics{diag.NewErrorDiagnostic("Failed to parse principal", err.Error())}
+    }
+
+    request.AccessControlParam.User = user
     request.ResourcePatternParam.Name = acl.ResourceName.ValueString()
     request.ResourcePatternParam.PatternType = acl.PatternType.ValueString()
     request.ResourcePatternParam.ResourceType = acl.ResourceType.ValueString()
+    return nil
 }

-func ParsePrincipalUser(principal string) string {
-    if condition := principal[:5]; condition == "User:" {
-        return principal[5:]
+func ParsePrincipalUser(principal string) (string, error) {
+    if len(principal) < 5 {
+        return "", fmt.Errorf("invalid principal format: %s", principal)
+    }
+    if condition := principal[:5]; condition != "User:" {
+        return "", fmt.Errorf("invalid principal format: %s", principal)
+    }
+    user := principal[5:]
+    if user == "" {
+        return "", fmt.Errorf("invalid principal format: %s", principal)
     }
-    return principal
+    return user, nil
 }

-func FlattenKafkaACLResource(acl *client.KafkaAclBindingVO, resource *KafkaAclResourceModel) {
+func FlattenKafkaACLResource(acl *client.KafkaAclBindingVO, resource *KafkaAclResourceModel) diag.Diagnostics {
     aclId, err := client.GenerateAclID(*acl)
     if err != nil {
-        return
+        return diag.Diagnostics{diag.NewErrorDiagnostic("Failed to generate ACL ID", err.Error())}
     }
     resource.ID = types.StringValue(aclId)

@@ -52,4 +66,5 @@ func FlattenKafkaACLResource(acl *client.KafkaAclBindingVO, resource *KafkaAclRe
     resource.Principal = types.StringValue("User:" + acl.AccessControl.User)
     resource.OperationGroup = types.StringValue(acl.AccessControl.OperationGroup.Name)
     resource.Permission = types.StringValue(acl.AccessControl.PermissionType)
+    return nil
 }
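The rewritten ParsePrincipalUser rejects malformed principals instead of silently passing them through. A small sketch of the expected behavior — illustrative only, mirroring the checks added above (it imports an internal package, so it only compiles from within this module):

package main

import (
    "fmt"

    "terraform-provider-automq/internal/models"
)

func main() {
    for _, p := range []string{"User:alice", "User:", "alice"} {
        user, err := models.ParsePrincipalUser(p)
        if err != nil {
            fmt.Printf("%q -> error: %v\n", p, err)
            continue
        }
        fmt.Printf("%q -> user %q\n", p, user)
    }
    // Output:
    // "User:alice" -> user "alice"
    // "User:" -> error: invalid principal format: User:
    // "alice" -> error: invalid principal format: alice
}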
