diff --git a/msi/tools/amazon-cloudwatch-agent.wxs b/msi/tools/amazon-cloudwatch-agent.wxs
index cc456d672..94727221b 100644
--- a/msi/tools/amazon-cloudwatch-agent.wxs
+++ b/msi/tools/amazon-cloudwatch-agent.wxs
@@ -44,6 +44,7 @@
+
@@ -119,6 +120,9 @@
+
+
+
diff --git a/test/metric/metric_value_query.go b/test/metric/metric_value_query.go
index eea358074..be4a7327c 100644
--- a/test/metric/metric_value_query.go
+++ b/test/metric/metric_value_query.go
@@ -46,7 +46,9 @@ func (n *MetricValueFetcher) Fetch(namespace, metricName string, metricSpecificD
Period: &metricQueryPeriod,
Stat: aws.String(string(stat)),
},
- Id: aws.String(strings.ToLower(metricName)),
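+ // GetMetricData query IDs must match ^[a-z][a-zA-Z0-9_]*$, so the dots in
+ // JMX metric names (e.g. "kafka.request.count") are replaced with underscores.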
+ Id: aws.String(strings.ToLower(strings.ReplaceAll(metricName, ".", "_"))),
},
}
diff --git a/test/metric_value_benchmark/agent_configs/jmx_kafka_config.json b/test/metric_value_benchmark/agent_configs/jmx_kafka_config.json
new file mode 100644
index 000000000..53076a3fa
--- /dev/null
+++ b/test/metric_value_benchmark/agent_configs/jmx_kafka_config.json
@@ -0,0 +1,75 @@
+{
+ "agent": {
+ "debug": true
+ },
+ "metrics": {
+ "namespace": "MetricValueBenchmarkJMXTest",
+ "force_flush_interval": 5,
+ "aggregation_dimensions": [
+ [
+ "InstanceId"
+ ]
+ ],
+ "append_dimensions": {
+ "InstanceId": "${aws:InstanceId}"
+ },
+ "metrics_collected": {
+ "jmx": [
+ {
+ "endpoint": "localhost:2000",
+ "kafka": {
+ "measurement": [
+ "kafka.unclean.election.rate",
+ "kafka.request.time.total",
+ "kafka.request.time.avg",
+ "kafka.request.time.99p",
+ "kafka.request.time.50p",
+ "kafka.request.queue",
+ "kafka.request.failed",
+ "kafka.request.count",
+ "kafka.purgatory.size",
+ "kafka.partition.under_replicated",
+ "kafka.partition.offline",
+ "kafka.partition.count",
+ "kafka.network.io",
+ "kafka.message.count",
+ "kafka.max.lag",
+ "kafka.leader.election.rate",
+ "kafka.isr.operation.count",
+ "kafka.controller.active.count"
+ ]
+ }
+ },
+ {
+ "endpoint": "localhost:2010",
+ "kafka-consumer": {
+ "measurement": [
+ "kafka.consumer.total.records-consumed-rate",
+ "kafka.consumer.total.bytes-consumed-rate",
+ "kafka.consumer.records-consumed-rate",
+ "kafka.consumer.fetch-rate",
+ "kafka.consumer.bytes-consumed-rate"
+ ]
+ }
+ },
+ {
+ "endpoint": "localhost:2020",
+ "kafka-producer": {
+ "measurement": [
+ "kafka.producer.io-wait-time-ns-avg",
+ "kafka.producer.record-retry-rate",
+ "kafka.producer.compression-rate",
+ "kafka.producer.outgoing-byte-rate",
+ "kafka.producer.request-rate",
+ "kafka.producer.byte-rate",
+ "kafka.producer.request-latency-avg",
+ "kafka.producer.response-rate",
+ "kafka.producer.record-error-rate",
+ "kafka.producer.record-send-rate"
+ ]
+ }
+ }
+ ]
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/metric_value_benchmark/agent_configs/jmx_tomcat_jvm_config.json b/test/metric_value_benchmark/agent_configs/jmx_tomcat_jvm_config.json
new file mode 100644
index 000000000..fc2b09ac2
--- /dev/null
+++ b/test/metric_value_benchmark/agent_configs/jmx_tomcat_jvm_config.json
@@ -0,0 +1,55 @@
+{
+ "agent": {
+ "debug": true
+ },
+ "metrics": {
+ "namespace": "MetricValueBenchmarkJMXTest",
+ "force_flush_interval": 5,
+ "aggregation_dimensions": [
+ [
+ "InstanceId"
+ ]
+ ],
+ "append_dimensions": {
+ "InstanceId": "${aws:InstanceId}"
+ },
+ "metrics_collected": {
+ "jmx": [
+ {
+ "endpoint": "localhost:2030",
+ "tomcat": {
+ "measurement": [
+ "tomcat.traffic",
+ "tomcat.threads",
+ "tomcat.sessions",
+ "tomcat.request_count",
+ "tomcat.processing_time",
+ "tomcat.max_time",
+ "tomcat.errors"
+ ]
+ },
+ "jvm": {
+ "measurement": [
+ "jvm.threads.count",
+ "jvm.memory.pool.used",
+ "jvm.memory.pool.max",
+ "jvm.memory.pool.init",
+ "jvm.memory.pool.committed",
+ "jvm.memory.nonheap.used",
+ "jvm.memory.nonheap.max",
+ "jvm.memory.nonheap.init",
+ "jvm.memory.nonheap.committed",
+ "jvm.memory.heap.used",
+ "jvm.memory.heap.max",
+ "jvm.memory.heap.init",
+ "jvm.memory.heap.committed",
+ "jvm.gc.collections.elapsed",
+ "jvm.gc.collections.count",
+ "jvm.classes.loaded"
+ ]
+ }
+ }
+ ]
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/metric_value_benchmark/jars/spring-boot-web-starter-tomcat.jar b/test/metric_value_benchmark/jars/spring-boot-web-starter-tomcat.jar
new file mode 100644
index 000000000..94add4cdc
Binary files /dev/null and b/test/metric_value_benchmark/jars/spring-boot-web-starter-tomcat.jar differ
diff --git a/test/metric_value_benchmark/jmx_kafka_test.go b/test/metric_value_benchmark/jmx_kafka_test.go
new file mode 100644
index 000000000..362736f87
--- /dev/null
+++ b/test/metric_value_benchmark/jmx_kafka_test.go
@@ -0,0 +1,158 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: MIT
+
+//go:build !windows
+
+package metric_value_benchmark
+
+import (
+ "log"
+ "time"
+
+ "github.com/aws/amazon-cloudwatch-agent-test/test/metric"
+ "github.com/aws/amazon-cloudwatch-agent-test/test/metric/dimension"
+ "github.com/aws/amazon-cloudwatch-agent-test/test/status"
+ "github.com/aws/amazon-cloudwatch-agent-test/test/test_runner"
+ "github.com/aws/amazon-cloudwatch-agent-test/util/common"
+)
+
+type JMXKafkaTestRunner struct {
+ test_runner.BaseTestRunner
+}
+
+var _ test_runner.ITestRunner = (*JMXKafkaTestRunner)(nil)
+
+func (t *JMXKafkaTestRunner) Validate() status.TestGroupResult {
+ metricsToFetch := t.GetMeasuredMetrics()
+ testResults := make([]status.TestResult, len(metricsToFetch))
+ for i, metricName := range metricsToFetch {
+ testResults[i] = t.validateJMXMetric(metricName)
+ }
+
+ return status.TestGroupResult{
+ Name: t.GetTestName(),
+ TestResults: testResults,
+ }
+}
+
+func (t *JMXKafkaTestRunner) GetTestName() string {
+ return "JMXKafka"
+}
+
+func (t *JMXKafkaTestRunner) GetAgentConfigFileName() string {
+ return "jmx_kafka_config.json"
+}
+
+func (t *JMXKafkaTestRunner) GetAgentRunDuration() time.Duration {
+ return 2 * time.Minute
+}
+
+func (t *JMXKafkaTestRunner) SetupBeforeAgentRun() error {
+ err := t.BaseTestRunner.SetupBeforeAgentRun()
+ if err != nil {
+ return err
+ }
+
+ log.Println("set up zookeeper and kafka")
+ startJMXCommands := []string{
+ "curl https://dlcdn.apache.org/kafka/3.6.2/kafka_2.13-3.6.2.tgz -o kafka_2.13-3.6.2.tgz",
+ "tar -xzf kafka_2.13-3.6.2.tgz",
+ "echo 'export JMX_PORT=2000'|cat - kafka_2.13-3.6.2/bin/kafka-server-start.sh > /tmp/kafka-server-start.sh && mv /tmp/kafka-server-start.sh kafka_2.13-3.6.2/bin/kafka-server-start.sh",
+ "echo 'export JMX_PORT=2010'|cat - kafka_2.13-3.6.2/bin/kafka-console-consumer.sh > /tmp/kafka-console-consumer.sh && mv /tmp/kafka-console-consumer.sh kafka_2.13-3.6.2/bin/kafka-console-consumer.sh",
+ "echo 'export JMX_PORT=2020'|cat - kafka_2.13-3.6.2/bin/kafka-console-producer.sh > /tmp/kafka-console-producer.sh && mv /tmp/kafka-console-producer.sh kafka_2.13-3.6.2/bin/kafka-console-producer.sh",
+ "sudo chmod +x kafka_2.13-3.6.2/bin/kafka-run-class.sh",
+ "sudo chmod +x kafka_2.13-3.6.2/bin/kafka-server-start.sh",
+ "sudo chmod +x kafka_2.13-3.6.2/bin/kafka-console-consumer.sh",
+ "sudo chmod +x kafka_2.13-3.6.2/bin/kafka-console-producer.sh",
+ "(yes | nohup kafka_2.13-3.6.2/bin/kafka-console-producer.sh --topic quickstart-events --bootstrap-server localhost:9092) > /tmp/kafka-console-producer-logs.txt 2>&1 &",
+ "kafka_2.13-3.6.2/bin/kafka-console-consumer.sh --topic quickstart-events --from-beginning --bootstrap-server localhost:9092 > /tmp/kafka-console-consumer-logs.txt 2>&1 &",
+ "curl https://dlcdn.apache.org/zookeeper/zookeeper-3.8.4/apache-zookeeper-3.8.4-bin.tar.gz -o apache-zookeeper-3.8.4-bin.tar.gz",
+ "tar -xzf apache-zookeeper-3.8.4-bin.tar.gz",
+ "mkdir apache-zookeeper-3.8.4-bin/data",
+ "touch apache-zookeeper-3.8.4-bin/conf/zoo.cfg",
+ "echo -e 'tickTime = 2000\ndataDir = ../data\nclientPort = 2181\ninitLimit = 5\nsyncLimit = 2\n' >> apache-zookeeper-3.8.4-bin/conf/zoo.cfg",
+ "sudo apache-zookeeper-3.8.4-bin/bin/zkServer.sh start",
+ "sudo kafka_2.13-3.6.2/bin/kafka-server-start.sh kafka_2.13-3.6.2/config/server.properties > /tmp/kafka-server-start-logs.txt 2>&1 &",
+ }
+
+ err = common.RunCommands(startJMXCommands)
+ if err != nil {
+ return err
+ }
+ return nil
+}
+
+func (t *JMXKafkaTestRunner) GetMeasuredMetrics() []string {
+ return []string{
+ "kafka.unclean.election.rate",
+ "kafka.request.time.total",
+ "kafka.request.time.avg",
+ "kafka.request.time.99p",
+ "kafka.request.time.50p",
+ "kafka.request.queue",
+ "kafka.request.failed",
+ "kafka.request.count",
+ "kafka.purgatory.size",
+ "kafka.partition.under_replicated",
+ "kafka.partition.offline",
+ "kafka.partition.count",
+ "kafka.network.io",
+ "kafka.message.count",
+ "kafka.max.lag",
+ "kafka.leader.election.rate",
+ "kafka.isr.operation.count",
+ "kafka.controller.active.count",
+ "kafka.consumer.total.records-consumed-rate",
+ "kafka.consumer.total.bytes-consumed-rate",
+ "kafka.consumer.records-consumed-rate",
+ "kafka.consumer.fetch-rate",
+ "kafka.consumer.bytes-consumed-rate",
+ "kafka.producer.io-wait-time-ns-avg",
+ "kafka.producer.record-retry-rate",
+ "kafka.producer.compression-rate",
+ "kafka.producer.outgoing-byte-rate",
+ "kafka.producer.request-rate",
+ "kafka.producer.byte-rate",
+ "kafka.producer.request-latency-avg",
+ "kafka.producer.response-rate",
+ "kafka.producer.record-error-rate",
+ "kafka.producer.record-send-rate",
+ }
+}
+
+func (t *JMXKafkaTestRunner) validateJMXMetric(metricName string) status.TestResult {
+ testResult := status.TestResult{
+ Name: metricName,
+ Status: status.FAILED,
+ }
+
+ dims, failed := t.DimensionFactory.GetDimensions([]dimension.Instruction{
+ {
+ Key: "InstanceId",
+ Value: dimension.UnknownDimensionValue(),
+ },
+ })
+
+ if len(failed) > 0 {
+ return testResult
+ }
+
+ fetcher := metric.MetricValueFetcher{}
+ values, err := fetcher.Fetch(jmxNamespace, metricName, dims, metric.AVERAGE, metric.HighResolutionStatPeriod)
+ log.Printf("metric values are %v", values)
+ if err != nil {
+ log.Printf("err: %v\n", err)
+ return testResult
+ }
+
+ if !metric.IsAllValuesGreaterThanOrEqualToExpectedValue(metricName, values, 0) {
+ return testResult
+ }
+
+ testResult.Status = status.SUCCESSFUL
+ return testResult
+}
diff --git a/test/metric_value_benchmark/jmx_tomcat_jvm_test.go b/test/metric_value_benchmark/jmx_tomcat_jvm_test.go
new file mode 100644
index 000000000..75bff8318
--- /dev/null
+++ b/test/metric_value_benchmark/jmx_tomcat_jvm_test.go
@@ -0,0 +1,131 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: MIT
+
+//go:build !windows
+
+package metric_value_benchmark
+
+import (
+ "log"
+ "time"
+
+ "github.com/aws/amazon-cloudwatch-agent-test/test/metric"
+ "github.com/aws/amazon-cloudwatch-agent-test/test/metric/dimension"
+ "github.com/aws/amazon-cloudwatch-agent-test/test/status"
+ "github.com/aws/amazon-cloudwatch-agent-test/test/test_runner"
+ "github.com/aws/amazon-cloudwatch-agent-test/util/common"
+)
+
+const jmxNamespace = "MetricValueBenchmarkJMXTest"
+
+type JMXTomcatJVMTestRunner struct {
+ test_runner.BaseTestRunner
+}
+
+var _ test_runner.ITestRunner = (*JMXTomcatJVMTestRunner)(nil)
+
+func (t *JMXTomcatJVMTestRunner) Validate() status.TestGroupResult {
+ metricsToFetch := t.GetMeasuredMetrics()
+ testResults := make([]status.TestResult, len(metricsToFetch))
+ for i, metricName := range metricsToFetch {
+ testResults[i] = t.validateJMXMetric(metricName)
+ }
+
+ return status.TestGroupResult{
+ Name: t.GetTestName(),
+ TestResults: testResults,
+ }
+}
+
+func (t *JMXTomcatJVMTestRunner) GetTestName() string {
+ return "JMXTomcatJVM"
+}
+
+func (t *JMXTomcatJVMTestRunner) GetAgentConfigFileName() string {
+ return "jmx_tomcat_jvm_config.json"
+}
+
+func (t *JMXTomcatJVMTestRunner) GetAgentRunDuration() time.Duration {
+ return 2 * time.Minute
+}
+
+func (t *JMXTomcatJVMTestRunner) SetupBeforeAgentRun() error {
+ err := t.BaseTestRunner.SetupBeforeAgentRun()
+ if err != nil {
+ return err
+ }
+
+ log.Println("set up jvm and tomcat")
+ startJMXCommands := []string{
+ "nohup java -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=2030 -Dcom.sun.management.jmxremote.local.only=false -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.rmi.port=2030 -Dcom.sun.management.jmxremote.host=0.0.0.0 -Djava.rmi.server.hostname=0.0.0.0 -Dserver.port=8090 -Dspring.application.admin.enabled=true -jar jars/spring-boot-web-starter-tomcat.jar > /tmp/spring-boot-web-starter-tomcat-jar.txt 2>&1 &",
+ }
+
+ err = common.RunCommands(startJMXCommands)
+ if err != nil {
+ return err
+ }
+ return nil
+}
+
+func (t *JMXTomcatJVMTestRunner) GetMeasuredMetrics() []string {
+ return []string{
+ "jvm.threads.count",
+ "jvm.memory.pool.used",
+ "jvm.memory.pool.max",
+ "jvm.memory.pool.init",
+ "jvm.memory.pool.committed",
+ "jvm.memory.nonheap.used",
+ "jvm.memory.nonheap.max",
+ "jvm.memory.nonheap.init",
+ "jvm.memory.nonheap.committed",
+ "jvm.memory.heap.used",
+ "jvm.memory.heap.max",
+ "jvm.memory.heap.init",
+ "jvm.memory.heap.committed",
+ "jvm.gc.collections.elapsed",
+ "jvm.gc.collections.count",
+ "jvm.classes.loaded",
+ "tomcat.traffic",
+ "tomcat.threads",
+ "tomcat.sessions",
+ "tomcat.request_count",
+ "tomcat.processing_time",
+ "tomcat.max_time",
+ "tomcat.errors",
+ }
+}
+
+func (t *JMXTomcatJVMTestRunner) validateJMXMetric(metricName string) status.TestResult {
+ testResult := status.TestResult{
+ Name: metricName,
+ Status: status.FAILED,
+ }
+
+ dims, failed := t.DimensionFactory.GetDimensions([]dimension.Instruction{
+ {
+ Key: "InstanceId",
+ Value: dimension.UnknownDimensionValue(),
+ },
+ })
+
+ if len(failed) > 0 {
+ return testResult
+ }
+
+ fetcher := metric.MetricValueFetcher{}
+ values, err := fetcher.Fetch(jmxNamespace, metricName, dims, metric.AVERAGE, metric.HighResolutionStatPeriod)
+ log.Printf("metric values are %v", values)
+ if err != nil {
+ log.Printf("err: %v\n", err)
+ return testResult
+ }
+
+ if !metric.IsAllValuesGreaterThanOrEqualToExpectedValue(metricName, values, 0) {
+ return testResult
+ }
+
+ testResult.Status = status.SUCCESSFUL
+ return testResult
+}
diff --git a/test/metric_value_benchmark/metrics_value_benchmark_test.go b/test/metric_value_benchmark/metrics_value_benchmark_test.go
index 8dbdd7390..ba7e63276 100644
--- a/test/metric_value_benchmark/metrics_value_benchmark_test.go
+++ b/test/metric_value_benchmark/metrics_value_benchmark_test.go
@@ -113,6 +113,8 @@ func getEc2TestRunners(env *environment.MetaData) []*test_runner.TestRunner {
{TestRunner: &ProcessesTestRunner{test_runner.BaseTestRunner{DimensionFactory: factory}}},
{TestRunner: &CollectDTestRunner{test_runner.BaseTestRunner{DimensionFactory: factory}}},
{TestRunner: &RenameSSMTestRunner{test_runner.BaseTestRunner{DimensionFactory: factory}}},
+ {TestRunner: &JMXTomcatJVMTestRunner{test_runner.BaseTestRunner{DimensionFactory: factory}}},
+ {TestRunner: &JMXKafkaTestRunner{test_runner.BaseTestRunner{DimensionFactory: factory}}},
}
}
return ec2TestRunners
@@ -157,5 +159,12 @@ func shouldRunEC2Test(env *environment.MetaData, t *test_runner.TestRunner) bool
}
_, shouldRun := env.EC2PluginTests[strings.ToLower(t.TestRunner.GetTestName())]
_, shouldExclude := env.ExcludedTests[strings.ToLower(t.TestRunner.GetTestName())]
- return shouldRun || !shouldExclude
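+ // Run the test when it is explicitly requested; otherwise only consult the exclude
+ // list when one is provided, and default to skipping the test.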
+ if shouldRun {
+ return true
+ } else if len(env.ExcludedTests) != 0 {
+ return !shouldExclude
+ }
+ return false
}