Skip to content

Commit

Permalink
feat(dms): support to get kafka messages (#5861)
Browse files Browse the repository at this point in the history
  • Loading branch information
saf3dfsa authored Nov 15, 2024
1 parent 7b058a0 commit 854990e
Show file tree
Hide file tree
Showing 4 changed files with 455 additions and 0 deletions.
125 changes: 125 additions & 0 deletions docs/data-sources/dms_kafka_messages.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,125 @@
---
subcategory: "Distributed Message Service (DMS)"
layout: "huaweicloud"
page_title: "HuaweiCloud: huaweicloud_dms_kafka_messages"
description: |-
Use this data source to get the list of Kafka messages.
---

# huaweicloud_dms_kafka_messages

Use this data source to get the list of Kafka messages.

## Example Usage

### Query messages by creation time

```hcl
variable "instance_id" {}
variable "topic" {}
variable "start_time" {}
variable "end_time" {}
data "huaweicloud_dms_kafka_messages" "test" {
instance_id = var.instance_id
topic = var.topic
start_time = var.start_time
end_time = var.end_time
}
```

### Query messages by content keyword (a maximum of 10 messages can be returned)

```hcl
variable "instance_id" {}
variable "topic" {}
variable "start_time" {}
variable "end_time" {}
variable "keyword" {}
data "huaweicloud_dms_kafka_messages" "test" {
instance_id = var.instance_id
topic = var.topic
start_time = var.start_time
end_time = var.end_time
keyword = var.keyword
}
```

### Query messages content by offset

```hcl
variable "instance_id" {}
variable "topic" {}
variable "partition" {}
variable "message_offset" {}
data "huaweicloud_dms_kafka_messages" "test" {
instance_id = var.instance_id
topic = var.topic
partition = var.partition
message_offset = var.message_offset
}
```

## Argument Reference

The following arguments are supported:

* `region` - (Optional, String) Specifies the region in which to query the resource.
If omitted, the provider-level region will be used.

* `instance_id` - (Required, String) Specifies the instance ID.

* `topic` - (Required, String) Specifies the topic name.

* `start_time` - (Optional, String) Specifies the start time, a Unix timestamp in milliseconds.
This parameter is mandatory when you query messages by creation time.

* `end_time` - (Optional, String) Specifies the end time, a Unix timestamp in milliseconds.
This parameter is mandatory when you query messages by creation time.

* `download` - (Optional, Bool) Specifies whether the messages need to be downloaded.
If it is **false**, large messages will be truncated. Defaults to **false**.

* `message_offset` - (Optional, String) Specifies the message offset.
This parameter is mandatory when you query the message content by offset.

* `partition` - (Optional, String) Specifies the partition.
This parameter is mandatory when you query the message content by offset.

* `keyword` - (Optional, String) Specifies the keyword.
If it's specified, a maximum of **10** messages can be returned.

## Attribute Reference

In addition to all arguments above, the following attributes are exported:

* `id` - The data source ID.

* `messages` - Indicates the message list.

The [messages](#messages_struct) structure is documented below.

<a name="messages_struct"></a>
The `messages` block supports:

* `key` - Indicates the message key.

* `value` - Indicates the message content.

* `timestamp` - Indicates the message production time.

* `huge_message` - Indicates whether the message is an oversized (big) message.

* `message_offset` - Indicates the message offset.

* `message_id` - Indicates the message ID.

* `partition` - Indicates the partition where the message is located.

* `size` - Indicates the message size.

* `app_id` - Indicates the application ID.

* `tag` - Indicates the message label.
1 change: 1 addition & 0 deletions huaweicloud/provider.go
Original file line number Diff line number Diff line change
Expand Up @@ -697,6 +697,7 @@ func Provider() *schema.Provider {
"huaweicloud_dms_kafka_topic_partitions": dms.DataSourceDmsKafkaTopicPartitions(),
"huaweicloud_dms_kafka_users": dms.DataSourceDmsKafkaUsers(),
"huaweicloud_dms_kafka_message_diagnosis_tasks": dms.DataSourceDmsKafkaMessageDiagnosisTasks(),
"huaweicloud_dms_kafka_messages": dms.DataSourceDmsKafkaMessages(),

"huaweicloud_dms_rabbitmq_flavors": dms.DataSourceRabbitMQFlavors(),
"huaweicloud_dms_rabbitmq_plugins": dms.DataSourceDmsRabbitmqPlugins(),
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,89 @@
package dms

import (
"fmt"
"strconv"
"testing"
"time"

"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"

"github.com/huaweicloud/terraform-provider-huaweicloud/huaweicloud/services/acceptance"
)

// TestAccDataSourceDmsKafkaMessages_basic is the acceptance test for the
// huaweicloud_dms_kafka_messages data source. It applies a configuration that
// produces a message and then queries it back, asserting that the data source
// exists and that the per-message attributes are populated.
func TestAccDataSourceDmsKafkaMessages_basic(t *testing.T) {
	var (
		all = "data.huaweicloud_dms_kafka_messages.test"
		dc  = acceptance.InitDataSourceCheck(all)
	)

	resource.ParallelTest(t, resource.TestCase{
		PreCheck: func() {
			acceptance.TestAccPreCheck(t)
		},
		ProviderFactories: acceptance.TestAccProviderFactories,
		Steps: []resource.TestStep{
			{
				Config: testDataSourceDataSourceDmsKafkaMessages_basic(acceptance.RandomAccResourceName()),
				Check: resource.ComposeTestCheckFunc(
					dc.CheckResourceExists(),
					// Every produced message should expose these attributes.
					resource.TestCheckResourceAttrSet(all, "messages.#"),
					resource.TestCheckResourceAttrSet(all, "messages.0.key"),
					resource.TestCheckResourceAttrSet(all, "messages.0.timestamp"),
					resource.TestCheckResourceAttrSet(all, "messages.0.huge_message"),
					resource.TestCheckResourceAttrSet(all, "messages.0.message_offset"),
					resource.TestCheckResourceAttrSet(all, "messages.0.partition"),
					resource.TestCheckResourceAttrSet(all, "messages.0.size"),
				),
			},
		},
	})
}

// testDataSourceDataSourceDmsKafkaMessages_basic renders the acceptance
// configuration: it produces one Kafka message (via the shared
// testAccKafkaMessageProduce_basic fixture) and queries it back three ways —
// by creation-time window, by content keyword, and by partition/offset.
// The time window spans from "now" to one hour ahead, as Unix milliseconds.
func testDataSourceDataSourceDmsKafkaMessages_basic(name string) string {
	// Use a single reference instant for both bounds of the window.
	now := time.Now()
	start := strconv.FormatInt(now.UnixMilli(), 10)
	end := strconv.FormatInt(now.Add(time.Hour).UnixMilli(), 10)

	return fmt.Sprintf(`
%[1]s

data "huaweicloud_dms_kafka_messages" "test" {
  depends_on = [huaweicloud_dms_kafka_message_produce.test]

  instance_id = huaweicloud_dms_kafka_instance.test.id
  topic       = huaweicloud_dms_kafka_topic.topic.name
  start_time  = "%[2]s"
  end_time    = "%[3]s"
  download    = false
  partition   = 1
}

data "huaweicloud_dms_kafka_messages" "by_keyword" {
  depends_on = [huaweicloud_dms_kafka_message_produce.test]

  instance_id = huaweicloud_dms_kafka_instance.test.id
  topic       = huaweicloud_dms_kafka_topic.topic.name
  start_time  = "%[2]s"
  end_time    = "%[3]s"
  download    = false
  keyword     = huaweicloud_dms_kafka_message_produce.test.body
}

output "by_keyword_validation" {
  value = length(data.huaweicloud_dms_kafka_messages.by_keyword.messages) == 1
}

data "huaweicloud_dms_kafka_messages" "by_offset" {
  depends_on = [huaweicloud_dms_kafka_message_produce.test]

  instance_id    = huaweicloud_dms_kafka_instance.test.id
  topic          = huaweicloud_dms_kafka_topic.topic.name
  partition      = 1
  message_offset = 0
}

output "by_offset_validation" {
  value = length(data.huaweicloud_dms_kafka_messages.by_offset.messages) == 1 && alltrue(
    [for v in data.huaweicloud_dms_kafka_messages.by_offset.messages[*].message_offset : v == 0]
  )
}
`, testAccKafkaMessageProduce_basic(name), start, end)
}
Loading

0 comments on commit 854990e

Please sign in to comment.