From a5e979a52728c5e3696e426f34dd7e5f16b77d64 Mon Sep 17 00:00:00 2001
From: rickbrouwer
Date: Thu, 26 Sep 2024 14:53:48 +0200
Subject: [PATCH] Refactor elasticsearch scaler config (#6101)

Signed-off-by: Rick Brouwer
Signed-off-by: rickbrouwer
---
 pkg/scalers/elasticsearch_scaler.go        | 254 ++++--------------
 pkg/scalers/elasticsearch_scaler_test.go   | 188 +++++++------
 pkg/scalers/kafka_scaler.go                |   5 +-
 pkg/scalers/scalersconfig/typed_config.go  |  27 +-
 .../scalersconfig/typed_config_test.go      |  14 +-
 5 files changed, 194 insertions(+), 294 deletions(-)

diff --git a/pkg/scalers/elasticsearch_scaler.go b/pkg/scalers/elasticsearch_scaler.go
index da7c1c2c1a8..44a27e8e463 100644
--- a/pkg/scalers/elasticsearch_scaler.go
+++ b/pkg/scalers/elasticsearch_scaler.go
@@ -4,7 +4,6 @@ import (
 	"bytes"
 	"context"
 	"encoding/json"
-	"errors"
 	"fmt"
 	"io"
 	"strconv"
@@ -22,28 +21,45 @@ import (
 type elasticsearchScaler struct {
 	metricType v2.MetricTargetType
-	metadata   *elasticsearchMetadata
+	metadata   elasticsearchMetadata
 	esClient   *elasticsearch.Client
 	logger     logr.Logger
 }
 
 type elasticsearchMetadata struct {
-	addresses             []string
-	unsafeSsl             bool
-	username              string
-	password              string
-	cloudID               string
-	apiKey                string
-	indexes               []string
-	searchTemplateName    string
-	parameters            []string
-	valueLocation         string
-	targetValue           float64
-	activationTargetValue float64
-	metricName            string
+	Addresses             []string `keda:"name=addresses, order=authParams;triggerMetadata, optional"`
+	UnsafeSsl             bool     `keda:"name=unsafeSsl, order=triggerMetadata, default=false"`
+	Username              string   `keda:"name=username, order=authParams;triggerMetadata, optional"`
+	Password              string   `keda:"name=password, order=authParams;resolvedEnv;triggerMetadata, optional"`
+	CloudID               string   `keda:"name=cloudID, order=authParams;triggerMetadata, optional"`
+	APIKey                string   `keda:"name=apiKey, order=authParams;triggerMetadata, optional"`
+	Index                 []string `keda:"name=index, order=authParams;triggerMetadata, separator=;"`
+	SearchTemplateName    string   `keda:"name=searchTemplateName, order=authParams;triggerMetadata"`
+	Parameters            []string `keda:"name=parameters, order=triggerMetadata, optional, separator=;"`
+	ValueLocation         string   `keda:"name=valueLocation, order=authParams;triggerMetadata"`
+	TargetValue           float64  `keda:"name=targetValue, order=authParams;triggerMetadata"`
+	ActivationTargetValue float64  `keda:"name=activationTargetValue, order=triggerMetadata, default=0"`
+	MetricName            string   `keda:"name=metricName, order=triggerMetadata, optional"`
+
+	TriggerIndex int
+}
+
+func (m *elasticsearchMetadata) Validate() error {
+	if (m.CloudID != "" || m.APIKey != "") && (len(m.Addresses) > 0 || m.Username != "" || m.Password != "") {
+		return fmt.Errorf("can't provide both cloud config and endpoint addresses")
+	}
+	if (m.CloudID == "" && m.APIKey == "") && (len(m.Addresses) == 0 && m.Username == "" && m.Password == "") {
+		return fmt.Errorf("must provide either cloud config or endpoint addresses")
+	}
+	if (m.CloudID != "" && m.APIKey == "") || (m.CloudID == "" && m.APIKey != "") {
+		return fmt.Errorf("both cloudID and apiKey must be provided when cloudID or apiKey is used")
+	}
+	if len(m.Addresses) > 0 && (m.Username == "" || m.Password == "") {
+		return fmt.Errorf("both username and password must be provided when addresses is used")
+	}
+	return nil
 }
 
-// NewElasticsearchScaler creates a new elasticsearch scaler
 func NewElasticsearchScaler(config *scalersconfig.ScalerConfig) (Scaler, error) {
 	metricType, err := GetMetricTargetType(config)
 	if err != nil {
@@ -69,184 +85,37 @@ func NewElasticsearchScaler(config *scalersconfig.ScalerConfig) (Scaler, error)
 	}, nil
 }
 
-const defaultUnsafeSsl = false
-
-func hasCloudConfig(meta *elasticsearchMetadata) bool {
-	if meta.cloudID != "" {
-		return true
-	}
-	if meta.apiKey != "" {
-		return true
-	}
-	return false
-}
-
-func hasEndpointsConfig(meta *elasticsearchMetadata) bool {
-	if len(meta.addresses) > 0 {
-		return true
-	}
-	if meta.username != "" {
-		return true
-	}
-	if meta.password != "" {
-		return true
-	}
-	return false
-}
-
-func extractEndpointsConfig(config *scalersconfig.ScalerConfig, meta *elasticsearchMetadata) error {
-	addresses, err := GetFromAuthOrMeta(config, "addresses")
-	if err != nil {
-		return err
-	}
-
-	meta.addresses = splitAndTrimBySep(addresses, ",")
-	if val, ok := config.AuthParams["username"]; ok {
-		meta.username = val
-	} else if val, ok := config.TriggerMetadata["username"]; ok {
-		meta.username = val
-	}
-
-	if config.AuthParams["password"] != "" {
-		meta.password = config.AuthParams["password"]
-	} else if config.TriggerMetadata["passwordFromEnv"] != "" {
-		meta.password = config.ResolvedEnv[config.TriggerMetadata["passwordFromEnv"]]
-	}
-
-	return nil
-}
-
-func extractCloudConfig(config *scalersconfig.ScalerConfig, meta *elasticsearchMetadata) error {
-	cloudID, err := GetFromAuthOrMeta(config, "cloudID")
-	if err != nil {
-		return err
-	}
-	meta.cloudID = cloudID
-
-	apiKey, err := GetFromAuthOrMeta(config, "apiKey")
-	if err != nil {
-		return err
-	}
-	meta.apiKey = apiKey
-	return nil
-}
-
-var (
-	// ErrElasticsearchMissingAddressesOrCloudConfig is returned when endpoint addresses or cloud config is missing.
-	ErrElasticsearchMissingAddressesOrCloudConfig = errors.New("must provide either endpoint addresses or cloud config")
-
-	// ErrElasticsearchConfigConflict is returned when both endpoint addresses and cloud config are provided.
-	ErrElasticsearchConfigConflict = errors.New("can't provide endpoint addresses and cloud config at the same time")
-)
-
-func parseElasticsearchMetadata(config *scalersconfig.ScalerConfig) (*elasticsearchMetadata, error) {
+func parseElasticsearchMetadata(config *scalersconfig.ScalerConfig) (elasticsearchMetadata, error) {
 	meta := elasticsearchMetadata{}
+	err := config.TypedConfig(&meta)
-	var err error
-	addresses, err := GetFromAuthOrMeta(config, "addresses")
-	cloudID, errCloudConfig := GetFromAuthOrMeta(config, "cloudID")
-	if err != nil && errCloudConfig != nil {
-		return nil, ErrElasticsearchMissingAddressesOrCloudConfig
-	}
-
-	if err == nil && addresses != "" {
-		err = extractEndpointsConfig(config, &meta)
-		if err != nil {
-			return nil, err
-		}
-	}
-	if errCloudConfig == nil && cloudID != "" {
-		err = extractCloudConfig(config, &meta)
-		if err != nil {
-			return nil, err
-		}
-	}
-
-	if hasEndpointsConfig(&meta) && hasCloudConfig(&meta) {
-		return nil, ErrElasticsearchConfigConflict
-	}
-
-	if val, ok := config.TriggerMetadata["unsafeSsl"]; ok {
-		unsafeSsl, err := strconv.ParseBool(val)
-		if err != nil {
-			return nil, fmt.Errorf("error parsing unsafeSsl: %w", err)
-		}
-		meta.unsafeSsl = unsafeSsl
-	} else {
-		meta.unsafeSsl = defaultUnsafeSsl
-	}
-
-	index, err := GetFromAuthOrMeta(config, "index")
-	if err != nil {
-		return nil, err
-	}
-	meta.indexes = splitAndTrimBySep(index, ";")
-
-	searchTemplateName, err := GetFromAuthOrMeta(config, "searchTemplateName")
-	if err != nil {
-		return nil, err
-	}
-	meta.searchTemplateName = searchTemplateName
-
-	if val, ok := config.TriggerMetadata["parameters"]; ok {
-		meta.parameters = splitAndTrimBySep(val, ";")
-	}
-
-	valueLocation, err := GetFromAuthOrMeta(config, "valueLocation")
-	if err != nil {
-		return nil, err
-	}
-	meta.valueLocation = valueLocation
-
-	targetValueString, err := GetFromAuthOrMeta(config, "targetValue")
-	if err != nil {
-		if config.AsMetricSource {
-			targetValueString = "0"
-		} else {
-			return nil, err
-		}
-	}
-	targetValue, err := strconv.ParseFloat(targetValueString, 64)
 	if err != nil {
-		return nil, fmt.Errorf("targetValue parsing error: %w", err)
+		return meta, err
 	}
-	meta.targetValue = targetValue
-	meta.activationTargetValue = 0
-	if val, ok := config.TriggerMetadata["activationTargetValue"]; ok {
-		activationTargetValue, err := strconv.ParseFloat(val, 64)
-		if err != nil {
-			return nil, fmt.Errorf("activationTargetValue parsing error: %w", err)
-		}
-		meta.activationTargetValue = activationTargetValue
-	}
+	meta.MetricName = GenerateMetricNameWithIndex(config.TriggerIndex, util.NormalizeString(fmt.Sprintf("elasticsearch-%s", meta.SearchTemplateName)))
+	meta.TriggerIndex = config.TriggerIndex
 
-	meta.metricName = GenerateMetricNameWithIndex(config.TriggerIndex, util.NormalizeString(fmt.Sprintf("elasticsearch-%s", meta.searchTemplateName)))
-	return &meta, nil
+	return meta, nil
 }
 
-// newElasticsearchClient creates elasticsearch db connection
-func newElasticsearchClient(meta *elasticsearchMetadata, logger logr.Logger) (*elasticsearch.Client, error) {
+func newElasticsearchClient(meta elasticsearchMetadata, logger logr.Logger) (*elasticsearch.Client, error) {
 	var config elasticsearch.Config
-	if hasCloudConfig(meta) {
+	if meta.CloudID != "" {
 		config = elasticsearch.Config{
-			CloudID: meta.cloudID,
-			APIKey:  meta.apiKey,
+			CloudID: meta.CloudID,
+			APIKey:  meta.APIKey,
 		}
 	} else {
 		config = elasticsearch.Config{
-			Addresses: meta.addresses,
-		}
-		if meta.username != "" {
-			config.Username = meta.username
-		}
-		if meta.password != "" {
-			config.Password = meta.password
+			Addresses: meta.Addresses,
+			Username:  meta.Username,
+			Password:  meta.Password,
 		}
 	}
-	config.Transport = util.CreateHTTPTransport(meta.unsafeSsl)
+	config.Transport = util.CreateHTTPTransport(meta.UnsafeSsl)
 	esClient, err := elasticsearch.NewClient(config)
 	if err != nil {
 		logger.Error(err, fmt.Sprintf("Found error when creating client: %s", err))
@@ -269,14 +138,14 @@ func (s *elasticsearchScaler) Close(_ context.Context) error {
 func (s *elasticsearchScaler) getQueryResult(ctx context.Context) (float64, error) {
 	// Build the request body.
 	var body bytes.Buffer
-	if err := json.NewEncoder(&body).Encode(buildQuery(s.metadata)); err != nil {
+	if err := json.NewEncoder(&body).Encode(buildQuery(&s.metadata)); err != nil {
 		s.logger.Error(err, "Error encoding query: %s", err)
 	}
 
 	// Run the templated search
 	res, err := s.esClient.SearchTemplate(
 		&body,
-		s.esClient.SearchTemplate.WithIndex(s.metadata.indexes...),
+		s.esClient.SearchTemplate.WithIndex(s.metadata.Index...),
 		s.esClient.SearchTemplate.WithContext(ctx),
 	)
 	if err != nil {
@@ -289,7 +158,7 @@ func (s *elasticsearchScaler) getQueryResult(ctx context.Context) (float64, erro
 	if err != nil {
 		return 0, err
 	}
-	v, err := getValueFromSearch(b, s.metadata.valueLocation)
+	v, err := getValueFromSearch(b, s.metadata.ValueLocation)
 	if err != nil {
 		return 0, err
 	}
@@ -298,14 +167,16 @@ func (s *elasticsearchScaler) getQueryResult(ctx context.Context) (float64, erro
 func buildQuery(metadata *elasticsearchMetadata) map[string]interface{} {
 	parameters := map[string]interface{}{}
-	for _, p := range metadata.parameters {
+	for _, p := range metadata.Parameters {
 		if p != "" {
-			kv := splitAndTrimBySep(p, ":")
-			parameters[kv[0]] = kv[1]
+			kv := strings.Split(p, ":")
+			key := strings.TrimSpace(kv[0])
+			value := strings.TrimSpace(kv[1])
+			parameters[key] = value
 		}
 	}
 	query := map[string]interface{}{
-		"id": metadata.searchTemplateName,
+		"id": metadata.SearchTemplateName,
 	}
 	if len(parameters) > 0 {
 		query["params"] = parameters
@@ -333,9 +204,9 @@ func getValueFromSearch(body []byte, valueLocation string) (float64, error) {
 func (s *elasticsearchScaler) GetMetricSpecForScaling(context.Context) []v2.MetricSpec {
 	externalMetric := &v2.ExternalMetricSource{
 		Metric: v2.MetricIdentifier{
-			Name: s.metadata.metricName,
+			Name: s.metadata.MetricName,
 		},
-		Target: GetMetricTargetMili(s.metricType, s.metadata.targetValue),
+		Target: GetMetricTargetMili(s.metricType, s.metadata.TargetValue),
 	}
 	metricSpec := v2.MetricSpec{
 		External: externalMetric, Type: externalMetricType,
@@ -352,14 +223,5 @@ func (s *elasticsearchScaler) GetMetricsAndActivity(ctx context.Context, metricN
 	metric := GenerateMetricInMili(metricName, num)
 
-	return []external_metrics.ExternalMetricValue{metric}, num > s.metadata.activationTargetValue, nil
-}
-
-// Splits a string separated by a specified separator and trims space from all the elements.
-func splitAndTrimBySep(s string, sep string) []string {
-	x := strings.Split(s, sep)
-	for i := range x {
-		x[i] = strings.Trim(x[i], " ")
-	}
-	return x
+	return []external_metrics.ExternalMetricValue{metric}, num > s.metadata.ActivationTargetValue, nil
 }
diff --git a/pkg/scalers/elasticsearch_scaler_test.go b/pkg/scalers/elasticsearch_scaler_test.go
index 43f9deb0f15..95725065703 100644
--- a/pkg/scalers/elasticsearch_scaler_test.go
+++ b/pkg/scalers/elasticsearch_scaler_test.go
@@ -3,7 +3,6 @@ package scalers
 import (
 	"context"
 	"fmt"
-	"strconv"
 	"testing"
 
 	"github.com/stretchr/testify/assert"
@@ -38,25 +37,40 @@ var testCases = []parseElasticsearchMetadataTestData{
 		name:          "must provide either endpoint addresses or cloud config",
 		metadata:      map[string]string{},
 		authParams:    map[string]string{},
-		expectedError: ErrElasticsearchMissingAddressesOrCloudConfig,
+		expectedError: fmt.Errorf("must provide either cloud config or endpoint addresses"),
 	},
 	{
 		name:          "no apiKey given",
 		metadata:      map[string]string{"cloudID": "my-cluster:xxxxxxxxxxx"},
 		authParams:    map[string]string{},
-		expectedError: ErrScalerConfigMissingField,
+		expectedError: fmt.Errorf("both cloudID and apiKey must be provided when cloudID or apiKey is used"),
 	},
 	{
 		name:          "can't provide endpoint addresses and cloud config at the same time",
 		metadata:      map[string]string{"addresses": "http://localhost:9200", "cloudID": "my-cluster:xxxxxxxxxxx"},
 		authParams:    map[string]string{"username": "admin", "apiKey": "xxxxxxxxx"},
-		expectedError: ErrElasticsearchConfigConflict,
+		expectedError: fmt.Errorf("can't provide both cloud config and endpoint addresses"),
+	},
+	{
+		name: "both username and password must be provided when addresses is used",
+		metadata: map[string]string{
+			"addresses":             "http://localhost:9200",
+			"unsafeSsl":             "true",
+			"index":                 "index1",
+			"searchTemplateName":    "myAwesomeSearch",
+			"parameters":            "param1:value1",
+			"valueLocation":         "hits.hits[0]._source.value",
+			"targetValue":           "12.2",
+			"activationTargetValue": "3.33",
+		},
+		authParams:    map[string]string{"username": "admin"},
+		expectedError: fmt.Errorf("both username and password must be provided when addresses is used"),
 	},
 	{
 		name:          "no index given",
 		metadata:      map[string]string{"addresses": "http://localhost:9200"},
 		authParams:    map[string]string{"username": "admin"},
-		expectedError: ErrScalerConfigMissingField,
+		expectedError: fmt.Errorf("missing required parameter \"index\""),
 	},
 	{
 		name: "no searchTemplateName given",
 		metadata: map[string]string{
 			"addresses": "http://localhost:9200",
 			"index":     "index1",
 		},
 		authParams:    map[string]string{"username": "admin"},
-		expectedError: ErrScalerConfigMissingField,
+		expectedError: fmt.Errorf("missing required parameter \"searchTemplateName\""),
 	},
 	{
 		name: "no valueLocation given",
 		metadata: map[string]string{
 			"addresses":          "http://localhost:9200",
 			"index":              "index1",
 			"searchTemplateName": "searchTemplateName",
 		},
 		authParams:    map[string]string{"username": "admin"},
-		expectedError: ErrScalerConfigMissingField,
+		expectedError: fmt.Errorf("missing required parameter \"valueLocation\""),
 	},
 	{
 		name: "no targetValue given",
 		metadata: map[string]string{
 			"addresses":          "http://localhost:9200",
 			"index":              "index1",
 			"searchTemplateName": "searchTemplateName",
 			"valueLocation":      "toto",
 		},
 		authParams:    map[string]string{"username": "admin"},
-		expectedError: ErrScalerConfigMissingField,
+		expectedError: fmt.Errorf("missing required parameter \"targetValue\""),
 	},
 	{
 		name: "invalid targetValue",
 		metadata: map[string]string{
 			"addresses":          "http://localhost:9200",
 			"index":              "index1",
 			"searchTemplateName": "searchTemplateName",
 			"valueLocation":      "toto",
 			"targetValue":        "AA",
 		},
 		authParams:    map[string]string{"username": "admin"},
-		expectedError: strconv.ErrSyntax,
+		expectedError: fmt.Errorf("unable to set param \"targetValue\""),
 	},
 	{
 		name: "invalid activationTargetValue",
 		metadata: map[string]string{
 			"addresses":             "http://localhost:9200",
 			"index":                 "index1",
 			"searchTemplateName":    "searchTemplateName",
 			"valueLocation":         "toto",
 			"targetValue":           "12",
 			"activationTargetValue": "AA",
 		},
 		authParams:    map[string]string{"username": "admin"},
-		expectedError: strconv.ErrSyntax,
+		expectedError: fmt.Errorf("unable to set param \"activationTargetValue\""),
 	},
 	{
 		name: "all fields ok",
 		metadata: map[string]string{
@@ -130,17 +144,17 @@ var testCases = []parseElasticsearchMetadataTestData{
 			"password": "password",
 		},
 		expectedMetadata: &elasticsearchMetadata{
-			addresses:             []string{"http://localhost:9200"},
-			unsafeSsl:             true,
-			indexes:               []string{"index1"},
-			username:              "admin",
-			password:              "password",
-			searchTemplateName:    "myAwesomeSearch",
-			parameters:            []string{"param1:value1"},
-			valueLocation:         "hits.hits[0]._source.value",
-			targetValue:           12.2,
-			activationTargetValue: 3.33,
-			metricName:            "s0-elasticsearch-myAwesomeSearch",
+			Addresses:             []string{"http://localhost:9200"},
+			UnsafeSsl:             true,
+			Index:                 []string{"index1"},
+			Username:              "admin",
+			Password:              "password",
+			SearchTemplateName:    "myAwesomeSearch",
+			Parameters:            []string{"param1:value1"},
+			ValueLocation:         "hits.hits[0]._source.value",
+			TargetValue:           12.2,
+			ActivationTargetValue: 3.33,
+			MetricName:            "s0-elasticsearch-myAwesomeSearch",
 		},
 		expectedError: nil,
 	},
@@ -160,16 +174,16 @@ var testCases = []parseElasticsearchMetadataTestData{
 			"password": "password",
 		},
 		expectedMetadata: &elasticsearchMetadata{
-			addresses:          []string{"http://localhost:9200"},
-			unsafeSsl:          false,
-			indexes:            []string{"index1", "index2"},
-			username:           "admin",
-			password:           "password",
-			searchTemplateName: "myAwesomeSearch",
-			parameters:         []string{"param1:value1"},
-			valueLocation:      "hits.hits[0]._source.value",
-			targetValue:        12,
-			metricName:         "s0-elasticsearch-myAwesomeSearch",
+			Addresses:          []string{"http://localhost:9200"},
+			UnsafeSsl:          false,
+			Index:              []string{"index1", "index2"},
+			Username:           "admin",
+			Password:           "password",
+			SearchTemplateName: "myAwesomeSearch",
+			Parameters:         []string{"param1:value1"},
+			ValueLocation:      "hits.hits[0]._source.value",
+			TargetValue:        12,
+			MetricName:         "s0-elasticsearch-myAwesomeSearch",
 		},
 		expectedError: nil,
 	},
@@ -189,16 +203,16 @@ var testCases = []parseElasticsearchMetadataTestData{
 			"password": "password",
 		},
 		expectedMetadata: &elasticsearchMetadata{
-			addresses:          []string{"http://localhost:9200"},
-			unsafeSsl:          false,
-			indexes:            []string{"index1", "index2"},
-			username:           "admin",
-			password:           "password",
-			searchTemplateName: "myAwesomeSearch",
-			parameters:         []string{"param1:value1"},
-			valueLocation:      "hits.hits[0]._source.value",
-			targetValue:        12,
-			metricName:         "s0-elasticsearch-myAwesomeSearch",
+			Addresses:          []string{"http://localhost:9200"},
+			UnsafeSsl:          false,
+			Index:              []string{"index1", "index2"},
+			Username:           "admin",
+			Password:           "password",
+			SearchTemplateName: "myAwesomeSearch",
+			Parameters:         []string{"param1:value1"},
+			ValueLocation:      "hits.hits[0]._source.value",
+			TargetValue:        12,
+			MetricName:         "s0-elasticsearch-myAwesomeSearch",
 		},
 		expectedError: nil,
 	},
@@ -218,16 +232,16 @@ var testCases = []parseElasticsearchMetadataTestData{
 			"password": "password",
 		},
 		expectedMetadata: &elasticsearchMetadata{
-			addresses:          []string{"http://localhost:9200", "http://localhost:9201"},
-			unsafeSsl:          false,
-			indexes:            []string{"index1"},
-			username:           "admin",
-			password:           "password",
-			searchTemplateName: "myAwesomeSearch",
-			parameters:         []string{"param1:value1"},
-			valueLocation:      "hits.hits[0]._source.value",
-			targetValue:        12,
-			metricName:         "s0-elasticsearch-myAwesomeSearch",
+			Addresses:          []string{"http://localhost:9200", "http://localhost:9201"},
+			UnsafeSsl:          false,
+			Index:              []string{"index1"},
+			Username:           "admin",
+			Password:           "password",
+			SearchTemplateName: "myAwesomeSearch",
+			Parameters:         []string{"param1:value1"},
+			ValueLocation:      "hits.hits[0]._source.value",
+			TargetValue:        12,
+			MetricName:         "s0-elasticsearch-myAwesomeSearch",
 		},
 		expectedError: nil,
 	},
@@ -247,16 +261,16 @@ var testCases = []parseElasticsearchMetadataTestData{
 			"password": "password",
 		},
 		expectedMetadata: &elasticsearchMetadata{
-			addresses:          []string{"http://localhost:9200", "http://localhost:9201"},
-			unsafeSsl:          false,
-			indexes:            []string{"index1"},
-			username:           "admin",
-			password:           "password",
-			searchTemplateName: "myAwesomeSearch",
-			parameters:         []string{"param1:value1"},
-			valueLocation:      "hits.hits[0]._source.value",
-			targetValue:        12,
-			metricName:         "s0-elasticsearch-myAwesomeSearch",
+			Addresses:          []string{"http://localhost:9200", "http://localhost:9201"},
+			UnsafeSsl:          false,
+			Index:              []string{"index1"},
+			Username:           "admin",
+			Password:           "password",
+			SearchTemplateName: "myAwesomeSearch",
+			Parameters:         []string{"param1:value1"},
+			ValueLocation:      "hits.hits[0]._source.value",
+			TargetValue:        12,
+			MetricName:         "s0-elasticsearch-myAwesomeSearch",
 		},
 		expectedError: nil,
 	},
@@ -279,16 +293,16 @@ var testCases = []parseElasticsearchMetadataTestData{
 			"ELASTICSEARCH_PASSWORD": "password",
 		},
 		expectedMetadata: &elasticsearchMetadata{
-			addresses:          []string{"http://localhost:9200", "http://localhost:9201"},
-			unsafeSsl:          false,
-			indexes:            []string{"index1"},
-			username:           "admin",
-			password:           "password",
-			searchTemplateName: "myAwesomeSearch",
-			parameters:         []string{"param1:value1"},
-			valueLocation:      "hits.hits[0]._source.value",
-			targetValue:        12,
-			metricName:         "s0-elasticsearch-myAwesomeSearch",
+			Addresses:          []string{"http://localhost:9200", "http://localhost:9201"},
+			UnsafeSsl:          false,
+			Index:              []string{"index1"},
+			Username:           "admin",
+			Password:           "password",
+			SearchTemplateName: "myAwesomeSearch",
+			Parameters:         []string{"param1:value1"},
+			ValueLocation:      "hits.hits[0]._source.value",
+			TargetValue:        12,
+			MetricName:         "s0-elasticsearch-myAwesomeSearch",
 		},
 		expectedError: nil,
 	},
@@ -303,11 +317,12 @@ func TestParseElasticsearchMetadata(t *testing.T) {
 				ResolvedEnv:     tc.resolvedEnv,
 			})
 			if tc.expectedError != nil {
-				assert.ErrorIs(t, err, tc.expectedError)
+				assert.Error(t, err)
+				assert.Contains(t, err.Error(), tc.expectedError.Error())
 			} else {
 				assert.NoError(t, err)
 				fmt.Println(tc.name)
-				assert.Equal(t, tc.expectedMetadata, metadata)
+				assert.Equal(t, tc.expectedMetadata, &metadata)
 			}
 		})
 	}
@@ -329,16 +344,16 @@ func TestUnsafeSslDefaultValue(t *testing.T) {
 			"password": "password",
 		},
 		expectedMetadata: &elasticsearchMetadata{
-			addresses:          []string{"http://localhost:9200"},
-			unsafeSsl:          false,
-			indexes:            []string{"index1"},
-			username:           "admin",
-			password:           "password",
-			searchTemplateName: "myAwesomeSearch",
-			parameters:         []string{"param1:value1"},
-			valueLocation:      "hits.hits[0]._source.value",
-			targetValue:        12,
-			metricName:         "s0-elasticsearch-myAwesomeSearch",
+			Addresses:          []string{"http://localhost:9200"},
+			UnsafeSsl:          false,
+			Index:              []string{"index1"},
+			Username:           "admin",
+			Password:           "password",
+			SearchTemplateName: "myAwesomeSearch",
+			Parameters:         []string{"param1:value1"},
+			ValueLocation:      "hits.hits[0]._source.value",
+			TargetValue:        12,
+			MetricName:         "s0-elasticsearch-myAwesomeSearch",
 		},
 		expectedError: nil,
 	}
@@ -347,7 +362,7 @@ func TestUnsafeSslDefaultValue(t *testing.T) {
 		AuthParams:      tc.authParams,
 	})
 	assert.NoError(t, err)
-	assert.Equal(t, tc.expectedMetadata, metadata)
+	assert.Equal(t, tc.expectedMetadata, &metadata)
 }
 
 func TestBuildQuery(t *testing.T) {
@@ -443,7 +458,7 @@ func TestBuildQuery(t *testing.T) {
 				AuthParams:      tc.authParams,
 			})
 			assert.NoError(t, err)
-			assert.Equal(t, tc.expectedQuery, buildQuery(metadata))
+			assert.Equal(t, tc.expectedQuery, buildQuery(&metadata))
 		})
 	}
 }
@@ -462,7 +477,8 @@ func TestElasticsearchGetMetricSpecForScaling(t *testing.T) {
 			TriggerIndex:    testData.triggerIndex,
 		})
 		if testData.metadataTestData.expectedError != nil {
-			assert.ErrorIs(t, err, testData.metadataTestData.expectedError)
+			assert.Error(t, err)
+			assert.Contains(t, err.Error(), testData.metadataTestData.expectedError.Error())
 			continue
 		}
 		if err != nil {
diff --git a/pkg/scalers/kafka_scaler.go b/pkg/scalers/kafka_scaler.go
index 1d05b4a6527..48d6b3c9069 100644
--- a/pkg/scalers/kafka_scaler.go
+++ b/pkg/scalers/kafka_scaler.go
@@ -49,8 +49,9 @@ type kafkaScaler struct {
 }
 
 const (
-	stringEnable  = "enable"
-	stringDisable = "disable"
+	stringEnable     = "enable"
+	stringDisable    = "disable"
+	defaultUnsafeSsl = false
 )
 
 type kafkaMetadata struct {
diff --git a/pkg/scalers/scalersconfig/typed_config.go b/pkg/scalers/scalersconfig/typed_config.go
index 1833c49d628..9cba6254225 100644
--- a/pkg/scalers/scalersconfig/typed_config.go
+++ b/pkg/scalers/scalersconfig/typed_config.go
@@ -62,8 +62,6 @@ const (
 // separators for map and slice elements
 const (
-	elemSeparator = ","
-	// TODO: support custom separator https://github.com/kedacore/keda/pull/5961/files#r1694991497
 	elemKeyValSeparator = "="
 )
@@ -77,6 +75,7 @@ const (
 	enumTag         = "enum"
 	exclusiveSetTag = "exclusiveSet"
 	rangeTag        = "range"
+	separatorTag    = "separator"
 )
 
 // Params is a struct that represents the parameter list that can be used in the keda tag
@@ -110,6 +109,9 @@ type Params struct {
 	// RangeSeparator is the 'range' tag parameter defining the separator for range values
 	RangeSeparator string
+
+	// Separator is the tag parameter to define which separator will be used
+	Separator string
 }
 
 // IsNested is a function that returns true if the parameter is nested
@@ -209,7 +211,7 @@ func (sc *ScalerConfig) setValue(field reflect.Value, params Params) error {
 			enumMap[e] = true
 		}
 		missingMap := make(map[string]bool)
-		split := strings.Split(valFromConfig, elemSeparator)
+		split := splitWithSeparator(valFromConfig, params.Separator)
 		for _, s := range split {
 			s := strings.TrimSpace(s)
 			if !enumMap[s] {
@@ -225,7 +227,7 @@ func (sc *ScalerConfig) setValue(field reflect.Value, params Params) error {
 		for _, e := range params.ExclusiveSet {
 			exclusiveMap[e] = true
 		}
-		split := strings.Split(valFromConfig, elemSeparator)
+		split := splitWithSeparator(valFromConfig, params.Separator)
 		exclusiveCount := 0
 		for _, s := range split {
 			s := strings.TrimSpace(s)
@@ -277,7 +279,7 @@ func setConfigValueURLParams(params Params, valFromConfig string, field reflect.Value) error {
 // setConfigValueMap is a function that sets the value of the map field
 func setConfigValueMap(params Params, valFromConfig string, field reflect.Value) error {
 	field.Set(reflect.MakeMap(reflect.MapOf(field.Type().Key(), field.Type().Elem())))
-	split := strings.Split(valFromConfig, elemSeparator)
+	split := splitWithSeparator(valFromConfig, params.Separator)
 	for _, s := range split {
 		s := strings.TrimSpace(s)
 		kv := strings.Split(s, elemKeyValSeparator)
@@ -315,6 +317,15 @@ func canRange(valFromConfig, elemRangeSeparator string, field reflect.Value) boo
 	return strings.Contains(valFromConfig, elemRangeSeparator)
 }
 
+// splitWithSeparator is a function that splits on default or custom separator
+func splitWithSeparator(valFromConfig, customSeparator string) []string {
+	separator := ","
+	if customSeparator != "" {
+		separator = customSeparator
+	}
+	return strings.Split(valFromConfig, separator)
+}
+
 // setConfigValueRange is a function that sets the value of the range field
 func setConfigValueRange(params Params, valFromConfig string, field reflect.Value) error {
 	rangeSplit := strings.Split(valFromConfig, params.RangeSeparator)
@@ -343,7 +354,7 @@ func setConfigValueRange(params Params, valFromConfig string, field reflect.Valu
 // setConfigValueSlice is a function that sets the value of the slice field
 func setConfigValueSlice(params Params, valFromConfig string, field reflect.Value) error {
 	elemIfc := reflect.New(field.Type().Elem()).Interface()
-	split := strings.Split(valFromConfig, elemSeparator)
+	split := splitWithSeparator(valFromConfig, params.Separator)
 	for i, s := range split {
 		s := strings.TrimSpace(s)
 		if canRange(s, params.RangeSeparator, field) {
@@ -474,6 +485,10 @@ func paramsFromTag(tag string, field reflect.StructField) (Params, error) {
 			if len(tsplit) == 2 {
 				params.RangeSeparator = strings.TrimSpace(tsplit[1])
 			}
+		case separatorTag:
+			if len(tsplit) > 1 {
+				params.Separator = strings.TrimSpace(tsplit[1])
+			}
 		case "":
 			continue
 		default:
diff --git a/pkg/scalers/scalersconfig/typed_config_test.go b/pkg/scalers/scalersconfig/typed_config_test.go
index 26b189c8dc5..996e45a1dd8 100644
--- a/pkg/scalers/scalersconfig/typed_config_test.go
+++ b/pkg/scalers/scalersconfig/typed_config_test.go
@@ -216,14 +216,16 @@ func TestSlice(t *testing.T) {
 
 	sc := &ScalerConfig{
 		TriggerMetadata: map[string]string{
-			"sliceVal":           "1,2,3",
-			"sliceValWithSpaces": "1, 2, 3",
+			"sliceVal":                   "1,2,3",
+			"sliceValWithSpaces":         "1, 2, 3",
+			"sliceValWithOtherSeparator": "1;2;3",
 		},
 	}
 
 	type testStruct struct {
-		SliceVal           []int `keda:"name=sliceVal, order=triggerMetadata"`
-		SliceValWithSpaces []int `keda:"name=sliceValWithSpaces, order=triggerMetadata"`
+		SliceVal                   []int `keda:"name=sliceVal, order=triggerMetadata"`
+		SliceValWithSpaces         []int `keda:"name=sliceValWithSpaces, order=triggerMetadata"`
+		SliceValWithOtherSeparator []int `keda:"name=sliceValWithOtherSeparator, order=triggerMetadata, separator=;"`
 	}
 
 	ts := testStruct{}
@@ -237,6 +239,10 @@ func TestSlice(t *testing.T) {
 	Expect(ts.SliceValWithSpaces[0]).To(Equal(1))
 	Expect(ts.SliceValWithSpaces[1]).To(Equal(2))
 	Expect(ts.SliceValWithSpaces[2]).To(Equal(3))
+	Expect(ts.SliceValWithOtherSeparator).To(HaveLen(3))
+	Expect(ts.SliceValWithOtherSeparator[0]).To(Equal(1))
+	Expect(ts.SliceValWithOtherSeparator[1]).To(Equal(2))
+	Expect(ts.SliceValWithOtherSeparator[2]).To(Equal(3))
 }

 // TestEnum tests the enum type
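
Usage sketch (illustrative only, not part of the patch): the snippet below shows how a metadata struct is populated through ScalerConfig.TypedConfig using the `keda:` tags applied throughout this refactor, including the new `separator` tag parameter added in typed_config.go above. The struct name, field set, and trigger values are hypothetical; the import path assumes the KEDA v2 module. As the updated elasticsearch tests imply, a `Validate() error` method defined on the metadata type (like the one added to elasticsearchMetadata) is surfaced through this same parsing step.

// Illustrative sketch, assuming the KEDA v2 module is available.
// The struct and values below are made up; only the tag syntax and
// ScalerConfig/TypedConfig usage mirror the patch above.
package example

import (
	"fmt"

	"github.com/kedacore/keda/v2/pkg/scalers/scalersconfig"
)

// exampleMetadata is a hypothetical metadata struct in the style of
// elasticsearchMetadata: list fields split on ';' instead of the default ','.
type exampleMetadata struct {
	Index      []string `keda:"name=index, order=triggerMetadata, separator=;"`
	Parameters []string `keda:"name=parameters, order=triggerMetadata, optional, separator=;"`
	UnsafeSsl  bool     `keda:"name=unsafeSsl, order=triggerMetadata, default=false"`
}

func parseExample() (exampleMetadata, error) {
	sc := &scalersconfig.ScalerConfig{
		TriggerMetadata: map[string]string{
			"index":      "index1; index2",
			"parameters": "param1:value1;param2:value2",
		},
	}

	meta := exampleMetadata{}
	// TypedConfig resolves each tagged field from its declared order
	// (triggerMetadata here), applies defaults for absent optional fields,
	// and splits list values on the custom separator ';'.
	if err := sc.TypedConfig(&meta); err != nil {
		return meta, err
	}

	// Elements are trimmed after splitting, so meta.Index is
	// []string{"index1", "index2"}; meta.UnsafeSsl keeps its default, false.
	fmt.Printf("%+v\n", meta)
	return meta, nil
}

This is the pattern the refactored parseElasticsearchMetadata now relies on: one TypedConfig call replaces the hand-written GetFromAuthOrMeta/strconv parsing, and validation rules live on the metadata type itself.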