diff --git a/test/config/config.go b/test/config/config.go index 398928a0bb..915bd08c6d 100644 --- a/test/config/config.go +++ b/test/config/config.go @@ -25,6 +25,7 @@ var TestConfig Config type Config struct { // Log + LocalConfigDir string `mapstructure:"local_config_dir" yaml:"local_config_dir"` GeneratedLogDir string `mapstructure:"generated_log_dir" yaml:"generated_log_dir"` WorkDir string `mapstructure:"work_dir" yaml:"work_dir"` // Host @@ -65,9 +66,10 @@ func ParseConfig() { TestConfig = Config{} // Log + TestConfig.LocalConfigDir = os.Getenv("LOCAL_CONFIG_DIR") TestConfig.GeneratedLogDir = os.Getenv("GENERATED_LOG_DIR") if len(TestConfig.GeneratedLogDir) == 0 { - TestConfig.GeneratedLogDir = "/tmp/ilogtail" + TestConfig.GeneratedLogDir = "/tmp/loongcollector" } TestConfig.WorkDir = os.Getenv("WORK_DIR") diff --git a/test/config/context.go b/test/config/context.go index ad0018d4dc..d6f4c0d57a 100644 --- a/test/config/context.go +++ b/test/config/context.go @@ -22,4 +22,5 @@ const ( ExposePortKey ContextKey = "exposePort" CurrentWorkingDeploymentKey ContextKey = "currentWorkingDeployment" QueryKey ContextKey = "query" + AgentPIDKey ContextKey = "agentPID" ) diff --git a/test/e2e/test_cases/aggregator_context/docker-compose.yaml b/test/e2e/test_cases/aggregator_context/docker-compose.yaml index 3070014ee2..6635e84189 100644 --- a/test/e2e/test_cases/aggregator_context/docker-compose.yaml +++ b/test/e2e/test_cases/aggregator_context/docker-compose.yaml @@ -23,7 +23,7 @@ services: environment: - STDOUT_SWITCH=true depends_on: - - ilogtailC + - loongcollectorC container_2: build: context: . @@ -37,4 +37,4 @@ services: timeout: 5s retries: 3 depends_on: - - ilogtailC + - loongcollectorC diff --git a/test/e2e/test_cases/aggregator_context/ilogtail-e2e.yaml b/test/e2e/test_cases/aggregator_context/ilogtail-e2e.yaml deleted file mode 100644 index 0dd724af88..0000000000 --- a/test/e2e/test_cases/aggregator_context/ilogtail-e2e.yaml +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright 2021 iLogtail Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -boot: - category: docker-compose -ilogtail: - config: - - name: aggregator-context-case - detail: - - global: - DefaultLogQueueSize: 10 - inputs: - - Type: input_file - FilePaths: - - /root/test/example.log - EnableContainerDiscovery: true - ContainerFilters: - IncludeEnv: - STDOUT_SWITCH: "true" - processors: - - Type: processor_split_char - SourceKey: content - SplitSep: "|" - SplitKeys: ["no", "content"] - aggregators: - - Type: aggregator_context - close_wait: 5s -verify: - log_rules: - - name: log-context-check - validator: log_context - spec: - system_rules: - - name: counter-check - validator: sys_counter - spec: - expect_equal_processed_log: true - expect_equal_flush_log: true - expect_received_minimum_log_num: 200 -testing_interval: 15s -retry: - times: 0 - interval: 10s diff --git a/test/e2e/test_cases/flusher_clickhouse/case.feature b/test/e2e/test_cases/flusher_clickhouse/case.feature index 84669e46e4..02fd8f8d61 100644 --- a/test/e2e/test_cases/flusher_clickhouse/case.feature +++ b/test/e2e/test_cases/flusher_clickhouse/case.feature @@ -36,7 +36,7 @@ Feature: flusher clickhouse BufferMinBytes: 10000000 BufferMaxBytes: 100000000 """ - Given iLogtail depends on containers {["clickhouse"]} + Given loongcollector depends on containers {["clickhouse"]} When start docker-compose {flusher_clickhouse} Then there is at least {10} logs Then the log fields match kv diff --git a/test/e2e/test_cases/flusher_elasticsearch/case.feature b/test/e2e/test_cases/flusher_elasticsearch/case.feature index 5e092e7e49..78b9343e8d 100644 --- a/test/e2e/test_cases/flusher_elasticsearch/case.feature +++ b/test/e2e/test_cases/flusher_elasticsearch/case.feature @@ -30,7 +30,7 @@ Feature: flusher elasticsearch Username: elastic Password: BtpoRTeyjmC=ruTIUoNN """ - Given iLogtail depends on containers {["elasticsearch"]} + Given loongcollector depends on containers {["elasticsearch"]} When start docker-compose {flusher_elasticsearch} Then there is at least {10} logs Then the log fields match kv diff --git a/test/e2e/test_cases/flusher_http/case.feature b/test/e2e/test_cases/flusher_http/case.feature index b660f48240..b4c430069c 100644 --- a/test/e2e/test_cases/flusher_http/case.feature +++ b/test/e2e/test_cases/flusher_http/case.feature @@ -38,7 +38,7 @@ Feature: flusher http - Type: ext_request_breaker FailureRatio: 0.1 """ - Given iLogtail depends on containers {["influxdb"]} + Given loongcollector depends on containers {["influxdb"]} When start docker-compose {flusher_http} Then there is at least {10} logs Then the log fields match kv diff --git a/test/e2e/test_cases/flusher_loki/case.feature b/test/e2e/test_cases/flusher_loki/case.feature index 37bf0209e2..59ad65b702 100644 --- a/test/e2e/test_cases/flusher_loki/case.feature +++ b/test/e2e/test_cases/flusher_loki/case.feature @@ -8,10 +8,10 @@ Feature: flusher loki Given subcribe data from {loki} with config """ address: http://loki:3100 - tenant_id: ilogtail + tenant_id: loongcollector target_labels: loki_name: hello - source: ilogtail + source: loongcollector """ Given {flusher-loki-case} local config as below """ @@ -29,7 +29,7 @@ Feature: flusher loki TagFieldsRename: loki_name: name URL: http://loki:3100/loki/api/v1/push - TenantID: ilogtail + TenantID: loongcollector MaxMessageWait: 100000000 MaxMessageBytes: 1024 Timeout: 1000000000000 @@ -39,9 +39,9 @@ Feature: flusher loki DynamicLabels: - tag.loki_name StaticLabels: - source: ilogtail + source: loongcollector """ - Given iLogtail depends on containers {["loki"]} + Given loongcollector 
depends on containers {["loki"]} When start docker-compose {flusher_loki} Then there is at least {10} logs Then the log fields match kv diff --git a/test/e2e/test_cases/input_canal/case.feature b/test/e2e/test_cases/input_canal/case.feature index 223353782f..2599a69850 100644 --- a/test/e2e/test_cases/input_canal/case.feature +++ b/test/e2e/test_cases/input_canal/case.feature @@ -22,7 +22,7 @@ Feature: input canal TextToString: true EnableDDL: true """ - Given iLogtail depends on containers {["mysql"]} + Given loongcollector depends on containers {["mysql"]} When start docker-compose {input_canal} When generate {10} http logs, with interval {10}ms, url: {http://client:10999/add/data}, method: {GET}, body: """ diff --git a/test/e2e/test_cases/input_canal_binfile_mode/case.feature b/test/e2e/test_cases/input_canal_binfile_mode/case.feature index dbe5e93df5..86a6dc6892 100644 --- a/test/e2e/test_cases/input_canal_binfile_mode/case.feature +++ b/test/e2e/test_cases/input_canal_binfile_mode/case.feature @@ -22,7 +22,7 @@ Feature: input canal binfile mode TextToString: true EnableDDL: true """ - Given iLogtail depends on containers {["mysql"]} + Given loongcollector depends on containers {["mysql"]} When start docker-compose {input_canal} When generate {10} http logs, with interval {10}ms, url: {http://client:10999/add/data}, method: {GET}, body: """ diff --git a/test/e2e/test_cases/input_docker_rawstdout/docker-compose.yaml b/test/e2e/test_cases/input_docker_rawstdout/docker-compose.yaml index a5812868cb..2e620913b3 100644 --- a/test/e2e/test_cases/input_docker_rawstdout/docker-compose.yaml +++ b/test/e2e/test_cases/input_docker_rawstdout/docker-compose.yaml @@ -23,4 +23,4 @@ services: environment: - STDOUT_SWITCH=true depends_on: - - ilogtailC + - loongcollectorC diff --git a/test/e2e/test_cases/input_docker_rawstdout_multiline/docker-compose.yaml b/test/e2e/test_cases/input_docker_rawstdout_multiline/docker-compose.yaml index a5812868cb..2e620913b3 100644 --- a/test/e2e/test_cases/input_docker_rawstdout_multiline/docker-compose.yaml +++ b/test/e2e/test_cases/input_docker_rawstdout_multiline/docker-compose.yaml @@ -23,4 +23,4 @@ services: environment: - STDOUT_SWITCH=true depends_on: - - ilogtailC + - loongcollectorC diff --git a/test/e2e/test_cases/input_http_server/case.feature b/test/e2e/test_cases/input_http_server/case.feature index 45b3f86cae..e32241b31b 100644 --- a/test/e2e/test_cases/input_http_server/case.feature +++ b/test/e2e/test_cases/input_http_server/case.feature @@ -27,9 +27,9 @@ Feature: input http server FieldType: json ExpondJson: true """ - Given iLogtail expose port {18089} to {18089} + Given loongcollector expose port {18089} to {18089} When start docker-compose {input_http_server} - When generate {10} http logs, with interval {10}ms, url: {http://ilogtailC:18089/?db=mydb}, method: {POST}, body: + When generate {10} http logs, with interval {10}ms, url: {http://loongcollectorC:18089/?db=mydb}, method: {POST}, body: """ weather,city=hz value=32 """ diff --git a/test/e2e/test_cases/input_mssql/case.feature b/test/e2e/test_cases/input_mssql/case.feature index 94b611afff..1916cda023 100644 --- a/test/e2e/test_cases/input_mssql/case.feature +++ b/test/e2e/test_cases/input_mssql/case.feature @@ -28,7 +28,7 @@ Feature: input mssql Password: MSsqlpa#1word StateMent: "select * from LogtailTestTable where id > ? 
ORDER BY id" """ - Given iLogtail depends on containers {["setup"]} + Given loongcollector depends on containers {["setup"]} When start docker-compose {input_mssql} Then there is at least {4} logs Then the log fields match as below diff --git a/test/e2e/test_cases/input_mysql/case.feature b/test/e2e/test_cases/input_mysql/case.feature index 3f5dafa48c..92e5d3e8d4 100644 --- a/test/e2e/test_cases/input_mysql/case.feature +++ b/test/e2e/test_cases/input_mysql/case.feature @@ -28,7 +28,7 @@ Feature: input mysql CheckPointStart: "0" IntervalMs: 1000 """ - Given iLogtail depends on containers {["mysql"]} + Given loongcollector depends on containers {["mysql"]} When start docker-compose {input_mysql} Then there is at least {500} logs Then the log fields match as below diff --git a/test/e2e/test_cases/input_pgsql/case.feature b/test/e2e/test_cases/input_pgsql/case.feature index b28f47f1cc..10a44f1f38 100644 --- a/test/e2e/test_cases/input_pgsql/case.feature +++ b/test/e2e/test_cases/input_pgsql/case.feature @@ -28,7 +28,7 @@ Feature: input pgsql Password: postgres StateMent: "select * from specialalarmtest where id > $1" """ - Given iLogtail depends on containers {["pgsql"]} + Given loongcollector depends on containers {["pgsql"]} When start docker-compose {input_pgsql} Then there is at least {10} logs Then the log fields match as below diff --git a/test/e2e/test_cases/input_static_file/case.feature b/test/e2e/test_cases/input_static_file/case.feature index ae703115a7..27ecf88dae 100644 --- a/test/e2e/test_cases/input_static_file/case.feature +++ b/test/e2e/test_cases/input_static_file/case.feature @@ -20,7 +20,7 @@ Feature: input static file - "/root/test/**/a*.log" MaxDirSearchDepth: 10 """ - Given iLogtail container mount {./a.log} to {/root/test/1/2/3/axxxx.log} + Given loongcollector container mount {./a.log} to {/root/test/1/2/3/axxxx.log} When start docker-compose {input_static_file} Then there is at least {1000} logs Then the log fields match kv diff --git a/test/e2e/test_cases/reader_deleted/case.feature b/test/e2e/test_cases/reader_deleted/case.feature index 7fb295c6c1..16b29a166f 100644 --- a/test/e2e/test_cases/reader_deleted/case.feature +++ b/test/e2e/test_cases/reader_deleted/case.feature @@ -17,6 +17,6 @@ Feature: reader deleted - /root/test/simple.log FlushTimeoutSecs: 3 """ - Given iLogtail container mount {./volume} to {/root/test} + Given loongcollector container mount {./volume} to {/root/test} When start docker-compose {reader_deleted} Then there is at least {1} logs \ No newline at end of file diff --git a/test/e2e/test_cases/reader_deleted/docker-compose.yaml b/test/e2e/test_cases/reader_deleted/docker-compose.yaml index 8be92134c3..4bb16a628e 100644 --- a/test/e2e/test_cases/reader_deleted/docker-compose.yaml +++ b/test/e2e/test_cases/reader_deleted/docker-compose.yaml @@ -25,4 +25,4 @@ services: environment: - STDOUT_SWITCH=true depends_on: - - ilogtailC + - loongcollectorC diff --git a/test/e2e/test_cases/reader_flush_timeout/case.feature b/test/e2e/test_cases/reader_flush_timeout/case.feature index 081426c104..3f5697e66e 100644 --- a/test/e2e/test_cases/reader_flush_timeout/case.feature +++ b/test/e2e/test_cases/reader_flush_timeout/case.feature @@ -17,6 +17,6 @@ Feature: reader flush timeout - /root/test/simple.log FlushTimeoutSecs: 1 """ - Given iLogtail container mount {./a.log} to {/root/test/simple.log} + Given loongcollector container mount {./a.log} to {/root/test/simple.log} When start docker-compose {reader_flush_timeout} Then there is at least {5} logs \ No 
newline at end of file diff --git a/test/e2e/test_cases/reader_log_rotate/case.feature b/test/e2e/test_cases/reader_log_rotate/case.feature index 426c501e57..2c77fcd3e9 100644 --- a/test/e2e/test_cases/reader_log_rotate/case.feature +++ b/test/e2e/test_cases/reader_log_rotate/case.feature @@ -17,6 +17,6 @@ Feature: reader log rotate - /root/test/simple.log FlushTimeoutSecs: 2 """ - Given iLogtail container mount {./volume} to {/root/test} + Given loongcollector container mount {./volume} to {/root/test} When start docker-compose {reader_log_rotate} Then there is at least {6} logs \ No newline at end of file diff --git a/test/e2e/test_cases/reader_new_line_after_timeout/case.feature b/test/e2e/test_cases/reader_new_line_after_timeout/case.feature index ddb701f93a..c1d29ea848 100644 --- a/test/e2e/test_cases/reader_new_line_after_timeout/case.feature +++ b/test/e2e/test_cases/reader_new_line_after_timeout/case.feature @@ -17,6 +17,6 @@ Feature: reader new line after timeout - /root/test/a.log FlushTimeoutSecs: 1 """ - Given iLogtail container mount {./a.log} to {/root/test/a.log} + Given loongcollector container mount {./a.log} to {/root/test/a.log} When start docker-compose {reader_new_line_after_timeout} Then there is at least {6} logs \ No newline at end of file diff --git a/test/engine/cleanup/cache.go b/test/engine/cleanup/cache.go index a0224ba376..18d73319bb 100644 --- a/test/engine/cleanup/cache.go +++ b/test/engine/cleanup/cache.go @@ -21,7 +21,7 @@ import ( func GoTestCache(ctx context.Context) (context.Context, error) { command := "/usr/local/go/bin/go clean -testcache" - if err := setup.Env.ExecOnSource(ctx, command); err != nil { + if _, err := setup.Env.ExecOnSource(ctx, command); err != nil { return ctx, err } return ctx, nil diff --git a/test/engine/cleanup/helper.go b/test/engine/cleanup/helper.go index 0a9408e749..c223800104 100644 --- a/test/engine/cleanup/helper.go +++ b/test/engine/cleanup/helper.go @@ -48,7 +48,6 @@ func All() { _, _ = AllGeneratedLog(ctx) _, _ = GoTestCache(ctx) _, _ = DeleteContainers(ctx) - // FIXME: if this test case has no subscriber and the previous one has subscriber, it will panic if subscriber.TestSubscriber != nil { _ = subscriber.TestSubscriber.Stop() } diff --git a/test/engine/cleanup/log.go b/test/engine/cleanup/log.go index a1a701c094..90780df8fa 100644 --- a/test/engine/cleanup/log.go +++ b/test/engine/cleanup/log.go @@ -23,7 +23,7 @@ import ( func AllGeneratedLog(ctx context.Context) (context.Context, error) { command := fmt.Sprintf("rm -rf %s/*", config.TestConfig.GeneratedLogDir) - if err := setup.Env.ExecOnSource(ctx, command); err != nil { + if _, err := setup.Env.ExecOnSource(ctx, command); err != nil { return ctx, err } return ctx, nil diff --git a/test/engine/control/config.go b/test/engine/control/config.go index 3e68a8bd1d..bbde3ad4a0 100644 --- a/test/engine/control/config.go +++ b/test/engine/control/config.go @@ -32,7 +32,6 @@ import ( "github.com/alibaba/ilogtail/test/engine/setup/subscriber" ) -const iLogtailLocalConfigDir = "/usr/local/loongcollector/conf/local" const lotailpluginHTTPAddress = "ilogtailC:18689" const E2EProjectName = "e2e-test-project" const E2ELogstoreName = "e2e-test-logstore" @@ -55,8 +54,8 @@ func AddLocalConfig(ctx context.Context, configName, c string) (context.Context, } } else { command := fmt.Sprintf(`cd %s && cat << 'EOF' > %s.yaml -%s`, iLogtailLocalConfigDir, configName, c) - if err := setup.Env.ExecOnLogtail(command); err != nil { +%s`, config.TestConfig.LocalConfigDir, configName, c) + if _, 
err := setup.Env.ExecOnLogtail(command); err != nil { return ctx, err } time.Sleep(5 * time.Second) @@ -65,8 +64,8 @@ func AddLocalConfig(ctx context.Context, configName, c string) (context.Context, } func RemoveAllLocalConfig(ctx context.Context) (context.Context, error) { - command := fmt.Sprintf("cd %s && rm -rf *.yaml", iLogtailLocalConfigDir) - if err := setup.Env.ExecOnLogtail(command); err != nil { + command := fmt.Sprintf("cd %s && rm -rf *.yaml", config.TestConfig.LocalConfigDir) + if _, err := setup.Env.ExecOnLogtail(command); err != nil { return ctx, err } return ctx, nil diff --git a/test/engine/control/filter.go b/test/engine/control/kubernetes.go similarity index 74% rename from test/engine/control/filter.go rename to test/engine/control/kubernetes.go index f90c08f811..4a7272aba2 100644 --- a/test/engine/control/filter.go +++ b/test/engine/control/kubernetes.go @@ -57,3 +57,25 @@ func RemoveLabel(ctx context.Context, labelStr string) (context.Context, error) } return ctx, nil } + +func ApplyYaml(ctx context.Context, yaml string) (context.Context, error) { + if k8sEnv, ok := setup.Env.(*setup.K8sEnv); ok { + if err := k8sEnv.Apply(yaml); err != nil { + return ctx, err + } + } else { + return ctx, fmt.Errorf("try to apply yaml, but env is not k8s env") + } + return ctx, nil +} + +func DeleteYaml(ctx context.Context, yaml string) (context.Context, error) { + if k8sEnv, ok := setup.Env.(*setup.K8sEnv); ok { + if err := k8sEnv.Delete(yaml); err != nil { + return ctx, err + } + } else { + return ctx, fmt.Errorf("try to delete yaml, but env is not k8s env") + } + return ctx, nil +} diff --git a/test/engine/setup/controller/kubernetes.go b/test/engine/setup/controller/kubernetes.go index f17ed7e72a..39efb316a3 100644 --- a/test/engine/setup/controller/kubernetes.go +++ b/test/engine/setup/controller/kubernetes.go @@ -16,12 +16,20 @@ package controller import ( "context" "fmt" + "os" + "path/filepath" "time" "github.com/avast/retry-go/v4" corev1 "k8s.io/api/core/v1" + "k8s.io/apimachinery/pkg/api/meta" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" + "k8s.io/apimachinery/pkg/runtime/serializer/yaml" + "k8s.io/client-go/discovery" + "k8s.io/client-go/dynamic" "k8s.io/client-go/kubernetes" + "k8s.io/client-go/restmapper" "github.com/alibaba/ilogtail/test/config" ) @@ -39,6 +47,15 @@ type DeploymentController struct { k8sClient *kubernetes.Clientset } +type DaemonSetController struct { + k8sClient *kubernetes.Clientset +} + +type DynamicController struct { + discoveryClient discovery.DiscoveryInterface + dynamicClient dynamic.Interface +} + func NewDeploymentController(k8sClient *kubernetes.Clientset) *DeploymentController { return &DeploymentController{k8sClient: k8sClient} } @@ -182,10 +199,6 @@ func (c *DeploymentController) DeleteDeployment(deploymentName, deploymentNamesp return c.k8sClient.AppsV1().Deployments(deploymentNamespace).Delete(context.TODO(), deploymentName, metav1.DeleteOptions{}) } -type DaemonSetController struct { - k8sClient *kubernetes.Clientset -} - func NewDaemonSetController(k8sClient *kubernetes.Clientset) *DaemonSetController { return &DaemonSetController{k8sClient: k8sClient} } @@ -204,3 +217,82 @@ func (c *DaemonSetController) GetDaemonSetPods(dsName, dsNamespace string) (*cor } return pods, nil } + +func NewDynamicController(dynamicClient dynamic.Interface, discoveryClient discovery.DiscoveryInterface) *DynamicController { + return &DynamicController{dynamicClient: dynamicClient, discoveryClient: 
discoveryClient} +} + +func (c *DynamicController) Apply(filePath string) error { + // Parse the object from the YAML file + mapping, obj, err := c.parseObjFromYaml(filePath) + if err != nil { + return err + } + + // Apply the object to the Kubernetes cluster + namespace := obj.GetNamespace() + if namespace == "" { + namespace = "default" // Use default namespace if not specified + } + resourceInterface := c.dynamicClient.Resource(mapping.Resource).Namespace(namespace) + if _, err := resourceInterface.Get(context.TODO(), obj.GetName(), metav1.GetOptions{}); err != nil { + // Object does not exist, create it + if _, err := resourceInterface.Create(context.TODO(), obj, metav1.CreateOptions{}); err != nil { + return err + } + } else { + // Object exists, update it + if _, err := resourceInterface.Update(context.TODO(), obj, metav1.UpdateOptions{}); err != nil { + return err + } + } + return nil +} + +func (c *DynamicController) Delete(filePath string) error { + // Parse the object from the YAML file + mapping, obj, err := c.parseObjFromYaml(filePath) + if err != nil { + return err + } + + // Delete the object from the Kubernetes cluster + namespace := obj.GetNamespace() + if namespace == "" { + namespace = "default" // Use default namespace if not specified + } + resourceInterface := c.dynamicClient.Resource(mapping.Resource).Namespace(namespace) + if err := resourceInterface.Delete(context.TODO(), obj.GetName(), metav1.DeleteOptions{}); err != nil { + return err + } + return nil +} + +func (c *DynamicController) parseObjFromYaml(filePath string) (*meta.RESTMapping, *unstructured.Unstructured, error) { + // Read the YAML file + basePath := "test_cases" + yamlFile, err := os.ReadFile(filepath.Join(basePath, filePath)) // #nosec G304 + if err != nil { + return nil, nil, err + } + + // Decode the YAML file into an unstructured.Unstructured object + decUnstructured := yaml.NewDecodingSerializer(unstructured.UnstructuredJSONScheme) + obj := &unstructured.Unstructured{} + _, gvk, err := decUnstructured.Decode(yamlFile, nil, obj) + if err != nil { + return nil, nil, err + } + + apiGroupResources, err := restmapper.GetAPIGroupResources(c.discoveryClient) + if err != nil { + return nil, nil, err + } + restMapper := restmapper.NewDiscoveryRESTMapper(apiGroupResources) + mapping, err := restMapper.RESTMapping(gvk.GroupKind(), gvk.Version) + if err != nil { + return nil, nil, err + } + + return mapping, obj, nil +} diff --git a/test/engine/setup/docker_compose.go b/test/engine/setup/docker_compose.go index 2c85bbbec9..6a7e3fc4de 100644 --- a/test/engine/setup/docker_compose.go +++ b/test/engine/setup/docker_compose.go @@ -123,10 +123,10 @@ func (d *DockerComposeEnv) Clean() error { return nil } -func (d *DockerComposeEnv) ExecOnLogtail(command string) error { - return fmt.Errorf("not implemented") +func (d *DockerComposeEnv) ExecOnLogtail(command string) (string, error) { + return "", fmt.Errorf("not implemented") } -func (d *DockerComposeEnv) ExecOnSource(ctx context.Context, command string) error { - return fmt.Errorf("not implemented") +func (d *DockerComposeEnv) ExecOnSource(ctx context.Context, command string) (string, error) { + return "", fmt.Errorf("not implemented") } diff --git a/test/engine/setup/dockercompose/compose.go b/test/engine/setup/dockercompose/compose.go index 351552f7e1..4163354568 100644 --- a/test/engine/setup/dockercompose/compose.go +++ b/test/engine/setup/dockercompose/compose.go @@ -55,9 +55,9 @@ services: timeout: 5s interval: 1s retries: 10 - ilogtailC: + loongcollectorC: 
image: aliyun/loongcollector:0.0.1 - hostname: ilogtail + hostname: loongcollector privileged: true pid: host volumes: @@ -139,8 +139,8 @@ func (c *ComposeBooter) Start(ctx context.Context) error { list, err := cli.ContainerList(context.Background(), types.ContainerListOptions{ Filters: filters.NewArgs( - filters.Arg("name", fmt.Sprintf("%s_ilogtailC*", projectName)), - filters.Arg("name", fmt.Sprintf("%s-ilogtailC*", projectName)), + filters.Arg("name", fmt.Sprintf("%s_loongcollectorC*", projectName)), + filters.Arg("name", fmt.Sprintf("%s-loongcollectorC*", projectName)), ), }) if len(list) != 1 { @@ -265,7 +265,7 @@ func (c *ComposeBooter) createComposeFile(ctx context.Context) error { } cfg := c.getLogtailpluginConfig() services := cfg["services"].(map[string]interface{}) - ilogtail := services["ilogtailC"].(map[string]interface{}) + loongcollector := services["loongcollectorC"].(map[string]interface{}) // merge docker compose file. if len(bytes) > 0 { caseCfg := make(map[string]interface{}) @@ -273,14 +273,14 @@ func (c *ComposeBooter) createComposeFile(ctx context.Context) error { return err } // depend on - ilogtailDependOn := map[string]interface{}{ + loongcollectorDependOn := map[string]interface{}{ "goc": map[string]string{ "condition": "service_healthy", }, } if dependOnContainers, ok := ctx.Value(config.DependOnContainerKey).([]string); ok { for _, container := range dependOnContainers { - ilogtailDependOn[container] = map[string]string{ + loongcollectorDependOn[container] = map[string]string{ "condition": "service_healthy", } } @@ -289,24 +289,24 @@ func (c *ComposeBooter) createComposeFile(ctx context.Context) error { for k := range newServices { services[k] = newServices[k] } - ilogtail["depends_on"] = ilogtailDependOn + loongcollector["depends_on"] = loongcollectorDependOn } // volume - ilogtailMount := services["ilogtailC"].(map[string]interface{})["volumes"].([]interface{}) + loongcollectorMount := services["loongcollectorC"].(map[string]interface{})["volumes"].([]interface{}) if volumes, ok := ctx.Value(config.MountVolumeKey).([]string); ok { for _, volume := range volumes { - ilogtailMount = append(ilogtailMount, volume) + loongcollectorMount = append(loongcollectorMount, volume) } } // ports - ilogtailPort := services["ilogtailC"].(map[string]interface{})["ports"].([]interface{}) + loongcollectorPort := services["loongcollectorC"].(map[string]interface{})["ports"].([]interface{}) if ports, ok := ctx.Value(config.ExposePortKey).([]string); ok { for _, port := range ports { - ilogtailPort = append(ilogtailPort, port) + loongcollectorPort = append(loongcollectorPort, port) } } - ilogtail["volumes"] = ilogtailMount - ilogtail["ports"] = ilogtailPort + loongcollector["volumes"] = loongcollectorMount + loongcollector["ports"] = loongcollectorPort yml, err := yaml.Marshal(cfg) if err != nil { return err @@ -314,7 +314,7 @@ func (c *ComposeBooter) createComposeFile(ctx context.Context) error { return os.WriteFile(config.CaseHome+finalFileName, yml, 0600) } -// getLogtailpluginConfig find the docker compose configuration of the ilogtail. +// getLogtailpluginConfig find the docker compose configuration of the loongcollector. 
func (c *ComposeBooter) getLogtailpluginConfig() map[string]interface{} { cfg := make(map[string]interface{}) f, _ := os.Create(config.CoverageFile) diff --git a/test/engine/setup/env.go b/test/engine/setup/env.go index 75a60cc595..c72cd5f65c 100644 --- a/test/engine/setup/env.go +++ b/test/engine/setup/env.go @@ -15,17 +15,19 @@ package setup import ( "context" + + "github.com/alibaba/ilogtail/test/config" ) var Env TestEnv type TestEnv interface { GetType() string - ExecOnLogtail(command string) error - ExecOnSource(ctx context.Context, command string) error + ExecOnLogtail(command string) (string, error) + ExecOnSource(ctx context.Context, command string) (string, error) } -func InitEnv(envType string) { +func InitEnv(ctx context.Context, envType string) (context.Context, error) { switch envType { case "host": Env = NewHostEnv() @@ -36,9 +38,23 @@ func InitEnv(envType string) { case "deployment": Env = NewDeploymentEnv() } + return SetAgentPID(ctx) } func Mkdir(ctx context.Context, dir string) (context.Context, error) { command := "mkdir -p " + dir - return ctx, Env.ExecOnSource(ctx, command) + _, err := Env.ExecOnSource(ctx, command) + return ctx, err +} + +func SetAgentPID(ctx context.Context) (context.Context, error) { + command := "ps -e | grep loongcollector | grep -v grep | awk '{print $1}'" + result, err := Env.ExecOnLogtail(command) + if err != nil { + if err.Error() == "not implemented" { + return ctx, nil + } + return ctx, err + } + return context.WithValue(ctx, config.AgentPIDKey, result), nil } diff --git a/test/engine/setup/host.go b/test/engine/setup/host.go index ea6fe500a8..830d195306 100644 --- a/test/engine/setup/host.go +++ b/test/engine/setup/host.go @@ -38,23 +38,23 @@ func (h *HostEnv) GetType() string { return "host" } -func (h *HostEnv) ExecOnLogtail(command string) error { +func (h *HostEnv) ExecOnLogtail(command string) (string, error) { return h.exec(command) } -func (h *HostEnv) ExecOnSource(ctx context.Context, command string) error { +func (h *HostEnv) ExecOnSource(ctx context.Context, command string) (string, error) { return h.exec(command) } -func (h *HostEnv) exec(command string) error { +func (h *HostEnv) exec(command string) (string, error) { if h.sshClient == nil { - return fmt.Errorf("ssh client init failed") + return "", fmt.Errorf("ssh client init failed") } result, err := h.sshClient.Run(command) if err != nil { - return fmt.Errorf("%v, %v", string(result), err) + return "", fmt.Errorf("%v, %v", string(result), err) } - return nil + return string(result), nil } func (h *HostEnv) initSSHClient() { diff --git a/test/engine/setup/k8s.go b/test/engine/setup/k8s.go index 7ee1c61aa9..826b1e3644 100644 --- a/test/engine/setup/k8s.go +++ b/test/engine/setup/k8s.go @@ -19,8 +19,11 @@ import ( "crypto/rand" "fmt" "math/big" + "sort" + "strings" corev1 "k8s.io/api/core/v1" + "k8s.io/client-go/dynamic" "k8s.io/client-go/kubernetes" "k8s.io/client-go/kubernetes/scheme" "k8s.io/client-go/rest" @@ -37,6 +40,7 @@ type K8sEnv struct { k8sClient *kubernetes.Clientset deploymentController *controller.DeploymentController daemonsetController *controller.DaemonSetController + dynamicController *controller.DynamicController } func NewDaemonSetEnv() *K8sEnv { @@ -59,34 +63,40 @@ func (k *K8sEnv) GetType() string { return k.deployType } -func (k *K8sEnv) ExecOnLogtail(command string) error { +func (k *K8sEnv) ExecOnLogtail(command string) (string, error) { if k.k8sClient == nil { - return fmt.Errorf("k8s client init failed") + return "", fmt.Errorf("k8s client init 
failed") } var pods *corev1.PodList var err error if k.deployType == "daemonset" { pods, err = k.daemonsetController.GetDaemonSetPods("logtail-ds", "kube-system") if err != nil { - return err + return "", err } } else if k.deployType == "deployment" { pods, err = k.deploymentController.GetRunningDeploymentPods("cluster-agent", "loong-collector") if err != nil { - return err + return "", err } } + results := make([]string, 0) + sort.Slice(pods.Items, func(i, j int) bool { + return pods.Items[i].Name < pods.Items[j].Name + }) for _, pod := range pods.Items { - if err := k.execInPod(k.config, pod.Namespace, pod.Name, pod.Spec.Containers[0].Name, []string{"bash", "-c", command}); err != nil { - return err + result, err := k.execInPod(k.config, pod.Namespace, pod.Name, pod.Spec.Containers[0].Name, []string{"bash", "-c", command}) + if err != nil { + return "", err } + results = append(results, result) } - return nil + return strings.Join(results, "\n"), nil } -func (k *K8sEnv) ExecOnSource(ctx context.Context, command string) error { +func (k *K8sEnv) ExecOnSource(ctx context.Context, command string) (string, error) { if k.k8sClient == nil { - return fmt.Errorf("k8s client init failed") + return "", fmt.Errorf("k8s client init failed") } deploymentName := "e2e-generator" if ctx.Value(config.CurrentWorkingDeploymentKey) != nil { @@ -94,18 +104,15 @@ func (k *K8sEnv) ExecOnSource(ctx context.Context, command string) error { } pods, err := k.deploymentController.GetRunningDeploymentPods(deploymentName, "default") if err != nil { - return err + return "", err } randomIndex, err := rand.Int(rand.Reader, big.NewInt(int64(len(pods.Items)))) if err != nil { - return err + return "", err } pod := pods.Items[randomIndex.Int64()] - fmt.Println("exec on pod: ", pod.Name) - if err := k.execInPod(k.config, pod.Namespace, pod.Name, pod.Spec.Containers[0].Name, []string{"sh", "-c", command}); err != nil { - return err - } - return nil + fmt.Println("exec on pod: ", pod.Name, "command: ", command) + return k.execInPod(k.config, pod.Namespace, pod.Name, pod.Spec.Containers[0].Name, []string{"sh", "-c", command}) } func (k *K8sEnv) AddFilter(filter controller.ContainerFilter) error { @@ -116,6 +123,14 @@ func (k *K8sEnv) RemoveFilter(filter controller.ContainerFilter) error { return k.deploymentController.RemoveFilter("e2e-generator", filter) } +func (k *K8sEnv) Apply(filePath string) error { + return k.dynamicController.Apply(filePath) +} + +func (k *K8sEnv) Delete(filePath string) error { + return k.dynamicController.Delete(filePath) +} + func SwitchCurrentWorkingDeployment(ctx context.Context, deploymentName string) (context.Context, error) { return context.WithValue(ctx, config.CurrentWorkingDeploymentKey, deploymentName), nil } @@ -139,9 +154,15 @@ func (k *K8sEnv) init() { k.k8sClient = k8sClient k.deploymentController = controller.NewDeploymentController(k.k8sClient) k.daemonsetController = controller.NewDaemonSetController(k.k8sClient) + + dynamicClient, err := dynamic.NewForConfig(c) + if err != nil { + panic(err) + } + k.dynamicController = controller.NewDynamicController(dynamicClient, k.k8sClient.Discovery()) } -func (k *K8sEnv) execInPod(config *rest.Config, namespace, podName, containerName string, command []string) error { +func (k *K8sEnv) execInPod(config *rest.Config, namespace, podName, containerName string, command []string) (string, error) { req := k.k8sClient.CoreV1().RESTClient(). Post(). Resource("pods"). 
@@ -158,7 +179,7 @@ func (k *K8sEnv) execInPod(config *rest.Config, namespace, podName, containerNam }, scheme.ParameterCodec) executor, err := remotecommand.NewSPDYExecutor(config, "POST", req.URL()) if err != nil { - return err + return "", err } var stdout, stderr bytes.Buffer err = executor.Stream(remotecommand.StreamOptions{ @@ -167,7 +188,7 @@ func (k *K8sEnv) execInPod(config *rest.Config, namespace, podName, containerNam Stderr: &stderr, }) if err != nil { - return err + return "", err } - return nil + return stdout.String(), nil } diff --git a/test/engine/setup/subscriber/grpc.go b/test/engine/setup/subscriber/grpc.go index e05137255b..ed02a4cd1e 100644 --- a/test/engine/setup/subscriber/grpc.go +++ b/test/engine/setup/subscriber/grpc.go @@ -44,7 +44,7 @@ type GrpcSubscriber struct { } func (g *GrpcSubscriber) Description() string { - return "this a gRPC subscriber, which is the default mock backend for Ilogtail." + return "this is a gRPC subscriber, which is the default mock backend for loongcollector." } type GRPCService struct { diff --git a/test/engine/setup/subscriber/loki.go b/test/engine/setup/subscriber/loki.go index 702d13e8c7..82c77b0835 100644 --- a/test/engine/setup/subscriber/loki.go +++ b/test/engine/setup/subscriber/loki.go @@ -73,7 +73,7 @@ func (l *LokiSubscriber) Name() string { } func (l *LokiSubscriber) Description() string { - return "this a loki subscriber, which is the default mock backend for Ilogtail." + return "this is a loki subscriber, which is the default mock backend for loongcollector." } func (l *LokiSubscriber) GetData(sql string, startTime int32) ([]*protocol.LogGroup, error) { diff --git a/test/engine/setup/subscriber/subscriber.go b/test/engine/setup/subscriber/subscriber.go index 62ea6bca32..247ffdd44c 100644 --- a/test/engine/setup/subscriber/subscriber.go +++ b/test/engine/setup/subscriber/subscriber.go @@ -36,7 +36,7 @@ var mu sync.Mutex // Creator creates a new subscriber instance according to the spec. type Creator func(spec map[string]interface{}) (Subscriber, error) -// Subscriber receives the logs transfer by ilogtail. +// Subscriber receives the logs transferred by loongcollector. type Subscriber interface { doc.Doc // Name of subscriber @@ -45,7 +45,7 @@ type Subscriber interface { Stop() error // Get data GetData(sql string, startTime int32) ([]*protocol.LogGroup, error) - // FlusherConfig returns the default flusher config for Ilogtail container to transfer the received or self telemetry data. + // FlusherConfig returns the default flusher config for loongcollector container to transfer the received or self telemetry data.
FlusherConfig() string } diff --git a/test/engine/steps.go b/test/engine/steps.go index 1808aea2df..cbc17af94f 100644 --- a/test/engine/steps.go +++ b/test/engine/steps.go @@ -10,7 +10,6 @@ import ( "github.com/alibaba/ilogtail/test/engine/cleanup" "github.com/alibaba/ilogtail/test/engine/control" "github.com/alibaba/ilogtail/test/engine/setup" - "github.com/alibaba/ilogtail/test/engine/setup/monitor" "github.com/alibaba/ilogtail/test/engine/setup/subscriber" "github.com/alibaba/ilogtail/test/engine/trigger" "github.com/alibaba/ilogtail/test/engine/verify" @@ -18,41 +17,52 @@ import ( func ScenarioInitializer(ctx *godog.ScenarioContext) { // Given + // ------------------------------------------ ctx.Given(`^\{(\S+)\} environment$`, setup.InitEnv) - ctx.Given(`^iLogtail depends on containers \{(.*)\}`, setup.SetDockerComposeDependOn) - ctx.Given(`^iLogtail container mount \{(.*)\} to \{(.*)\}`, setup.MountVolume) - ctx.Given(`^iLogtail expose port \{(.*)\} to \{(.*)\}`, setup.ExposePort) + ctx.Given(`^loongcollector depends on containers \{(.*)\}`, setup.SetDockerComposeDependOn) + ctx.Given(`^loongcollector container mount \{(.*)\} to \{(.*)\}`, setup.MountVolume) + ctx.Given(`^loongcollector expose port \{(.*)\} to \{(.*)\}`, setup.ExposePort) ctx.Given(`^\{(.*)\} local config as below`, control.AddLocalConfig) ctx.Given(`^\{(.*)\} http config as below`, control.AddHTTPConfig) ctx.Given(`^remove http config \{(.*)\}`, control.RemoveHTTPConfig) ctx.Given(`^subcribe data from \{(\S+)\} with config`, subscriber.InitSubscriber) ctx.Given(`^mkdir \{(.*)\}`, setup.Mkdir) - ctx.Given(`^docker-compose boot type \{(\S+)\}$`, setup.SetDockerComposeBootType) + // ------------------------------------------ // When - ctx.When(`^generate \{(\d+)\} regex logs to file \{(.*)\}, with interval \{(\d+)\}ms$`, trigger.RegexSingle) - ctx.When(`^generate \{(\d+)\} regex gbk logs to file \{(.*)\}, with interval \{(\d+)\}ms$`, trigger.RegexSingleGBK) - ctx.When(`^generate \{(\d+)\} http logs, with interval \{(\d+)\}ms, url: \{(.*)\}, method: \{(.*)\}, body:`, trigger.HTTP) + // ------------------------------------------ ctx.When(`^add k8s label \{(.*)\}`, control.AddLabel) ctx.When(`^remove k8s label \{(.*)\}`, control.RemoveLabel) ctx.When(`^start docker-compose \{(\S+)\}`, setup.StartDockerComposeEnv) ctx.When(`^switch working on deployment \{(.*)\}`, setup.SwitchCurrentWorkingDeployment) - ctx.When(`^generate \{(\d+)\} apsara logs to file \{(.*)\}, with interval \{(\d+)\}ms$`, trigger.Apsara) - ctx.When(`^generate \{(\d+)\} delimiter logs to file \{(.*)\}, with interval \{(\d+)\}ms$`, trigger.DelimiterSingle) ctx.When(`^query through \{(.*)\}`, control.SetQuery) + ctx.When(`^apply yaml \{(.*)\} to k8s`, control.ApplyYaml) + ctx.When(`^delete yaml \{(.*)\} from k8s`, control.DeleteYaml) + + // generate ctx.When(`^begin trigger`, trigger.BeginTrigger) + ctx.When(`^generate \{(\d+)\} regex logs to file \{(.*)\}, with interval \{(\d+)\}ms$`, trigger.RegexSingle) + ctx.When(`^generate \{(\d+)\} multiline regex logs to file \{(.*)\}, with interval \{(\d+)\}ms$`, trigger.RegexMultiline) + ctx.When(`^generate \{(\d+)\} regex gbk logs to file \{(.*)\}, with interval \{(\d+)\}ms$`, trigger.RegexSingleGBK) + ctx.When(`^generate \{(\d+)\} http logs, with interval \{(\d+)\}ms, url: \{(.*)\}, method: \{(.*)\}, body:`, trigger.HTTP) + ctx.When(`^generate \{(\d+)\} apsara logs to file \{(.*)\}, with interval \{(\d+)\}ms$`, trigger.Apsara) + ctx.When(`^generate \{(\d+)\} delimiter logs to file \{(.*)\}, with interval 
\{(\d+)\}ms, with delimiter \{(.*)\} and quote \{(.*)\}$`, trigger.DelimiterSingle) + ctx.When(`^generate \{(\d+)\} multiline delimiter logs to file \{(.*)\}, with interval \{(\d+)\}ms, with delimiter \{(.*)\} and quote \{(.*)\}$`, trigger.DelimiterMultiline) + ctx.When(`^generate \{(\d+)\} json logs to file \{(.*)\}, with interval \{(\d+)\}ms$`, trigger.JSONSingle) + ctx.When(`^generate \{(\d+)\} multiline json logs to file \{(.*)\}, with interval \{(\d+)\}ms$`, trigger.JSONMultiline) ctx.When(`^execute \{(\d+)\} commands to generate process security events`, trigger.TrigerProcessSecurityEvents) ctx.When(`^execute \{(\d+)\} commands to generate network security events on url \{(.*)\}$`, trigger.TrigerNetworksSecurityEvents) ctx.When(`^execute \{(\d+)\} commands to generate file security events on files \{(.*)\}$`, trigger.TrigerFileSecurityEvents) - ctx.When(`^generate random nginx logs to file, speed \{(\d+)\}MB/s, total \{(\d+)\}min, to file \{(.*)\}`, trigger.GenerateRandomNginxLogToFile) - ctx.When(`^start monitor \{(\S+)\}`, monitor.StartMonitor) - ctx.When(`^wait monitor until log processing finished$`, monitor.WaitMonitorUntilProcessingFinished) + // ------------------------------------------ // Then + // ------------------------------------------ + // log ctx.Then(`^there is \{(\d+)\} logs$`, verify.LogCount) ctx.Then(`^there is more than \{(\d+)\} metrics in \{(\d+)\} seconds and the value is greater than \{(\d+)\} and less than \{(\d+)\}$`, verify.MetricCountAndValueCompare) ctx.Then(`^there is more than \{(\d+)\} metrics in \{(\d+)\} seconds and the value is \{(\d+)\}$`, verify.MetricCountAndValueEqual) ctx.Then(`^there is more than \{(\d+)\} metrics in \{(\d+)\} seconds$`, verify.MetricCount) + ctx.Then(`^there is less than \{(\d+)\} logs$`, verify.LogCountLess) ctx.Then(`^there is at least \{(\d+)\} logs$`, verify.LogCountAtLeast) ctx.Then(`^there is at least \{(\d+)\} logs with filter key \{(.*)\} value \{(.*)\}$`, verify.LogCountAtLeastWithFilter) ctx.Then(`^the log fields match kv`, verify.LogFieldKV) @@ -61,13 +71,17 @@ func ScenarioInitializer(ctx *godog.ScenarioContext) { ctx.Then(`^the log fields match as below`, verify.LogField) ctx.Then(`^the log labels match as below`, verify.LogLabel) ctx.Then(`^the logtail log contains \{(\d+)\} times of \{(.*)\}$`, verify.LogtailPluginLog) + ctx.Then(`^the log is in order$`, verify.LogOrder) + + // metric + ctx.Then(`^there is more than \{(\d+)\} metrics in \{(\d+)\} seconds$`, verify.MetricCount) + + // other ctx.Then(`wait \{(\d+)\} seconds`, func(ctx context.Context, t int) context.Context { time.Sleep(time.Duration(t) * time.Second) return ctx }) - // special pattern logs - ctx.Then(`^the log fields match regex single`, verify.RegexSingle) - ctx.Then(`^the log fields match apsara`, verify.Apsara) + // ------------------------------------------ ctx.Before(func(ctx context.Context, sc *godog.Scenario) (context.Context, error) { config.ParseConfig() @@ -76,6 +90,6 @@ func ScenarioInitializer(ctx *godog.ScenarioContext) { }) ctx.After(func(ctx context.Context, sc *godog.Scenario, err error) (context.Context, error) { cleanup.All() - return ctx, nil + return verify.AgentNotCrash(ctx) }) } diff --git a/test/engine/trigger/ebpf_trigger.go b/test/engine/trigger/ebpf_trigger.go index f606cc1f5e..4b824c557e 100644 --- a/test/engine/trigger/ebpf_trigger.go +++ b/test/engine/trigger/ebpf_trigger.go @@ -39,7 +39,7 @@ func TrigerProcessSecurityEvents(ctx context.Context, commandCnt int) (context.C func execveCommands(ctx context.Context, 
commandCnt int) error { execveCommand := "ps -ef | grep loongcollector-e2e-test" for i := 0; i < commandCnt; i++ { - if err := setup.Env.ExecOnSource(ctx, execveCommand); err != nil { + if _, err := setup.Env.ExecOnSource(ctx, execveCommand); err != nil { return err } } @@ -62,7 +62,7 @@ func TrigerNetworksSecurityEvents(ctx context.Context, commandCnt int, url strin func curlURL(ctx context.Context, commandCnt int, url string) error { curlCommand := "curl --connect-timeout 1 " + url + ";" for i := 0; i < commandCnt; i++ { - if err := setup.Env.ExecOnSource(ctx, curlCommand); err != nil { + if _, err := setup.Env.ExecOnSource(ctx, curlCommand); err != nil { return err } } @@ -94,17 +94,17 @@ func rwFile(ctx context.Context, commandCnt int, filenames string) error { files := strings.Split(filenames, ",") for _, file := range files { touchFileCommand := "touch " + file + ";" - if err := setup.Env.ExecOnSource(ctx, touchFileCommand); err != nil { + if _, err := setup.Env.ExecOnSource(ctx, touchFileCommand); err != nil { return err } catFileCommand := "echo 'Hello, World!' >> " + file + ";" for i := 0; i < commandCnt; i++ { - if err := setup.Env.ExecOnSource(ctx, catFileCommand); err != nil { + if _, err := setup.Env.ExecOnSource(ctx, catFileCommand); err != nil { return err } } removeCommand := "rm " + file + ";" - if err := setup.Env.ExecOnSource(ctx, removeCommand); err != nil { + if _, err := setup.Env.ExecOnSource(ctx, removeCommand); err != nil { return err } } @@ -125,7 +125,7 @@ func mmapFile(ctx context.Context, commandCnt int, filenames string) error { }); err != nil { return err } - if err := setup.Env.ExecOnSource(ctx, triggerEBPFCommand.String()); err != nil { + if _, err := setup.Env.ExecOnSource(ctx, triggerEBPFCommand.String()); err != nil { return err } } @@ -138,15 +138,15 @@ func truncateFile(ctx context.Context, commandCnt int, filenames string) error { truncateFileCommand1 := "truncate -s 10k " + file + ";" truncateFileCommand2 := "truncate -s 0 " + file + ";" for i := 0; i < commandCnt/2; i++ { - if err := setup.Env.ExecOnSource(ctx, truncateFileCommand1); err != nil { + if _, err := setup.Env.ExecOnSource(ctx, truncateFileCommand1); err != nil { return err } - if err := setup.Env.ExecOnSource(ctx, truncateFileCommand2); err != nil { + if _, err := setup.Env.ExecOnSource(ctx, truncateFileCommand2); err != nil { return err } } if commandCnt%2 != 0 { - if err := setup.Env.ExecOnSource(ctx, truncateFileCommand1); err != nil { + if _, err := setup.Env.ExecOnSource(ctx, truncateFileCommand1); err != nil { return err } } diff --git a/test/engine/trigger/generator/apsara_test.go b/test/engine/trigger/generator/apsara_test.go index f6cacf8996..40329c0cfe 100644 --- a/test/engine/trigger/generator/apsara_test.go +++ b/test/engine/trigger/generator/apsara_test.go @@ -15,7 +15,7 @@ package generator import ( "fmt" - "io" + "math/rand" "os" "strconv" "testing" @@ -24,28 +24,19 @@ import ( // TestGenerateApsara will be executed in the environment being collected. 
func TestGenerateApsara(t *testing.T) { - gneratedLogDir := getEnvOrDefault("GENERATED_LOG_DIR", "/tmp/ilogtail") - totalLog, err := strconv.Atoi(getEnvOrDefault("TOTAL_LOG", "100")) + config, err := getGenerateFileLogConfigFromEnv() if err != nil { - t.Fatalf("parse TOTAL_LOG failed: %v", err) + t.Fatalf("get generate file log config from env failed: %v", err) return } - interval, err := strconv.Atoi(getEnvOrDefault("INTERVAL", "1")) - if err != nil { - t.Fatalf("parse INTERVAL failed: %v", err) - return - } - fileName := getEnvOrDefault("FILENAME", "apsara.log") - - testLogConent := []string{ - "[%s]\t[ERROR]\t[32337]\t[/build/core/application/Application:12]\tfile:file0\tlogNo:1199997\tmark:-\tmsg:hello world!", - "[%s]\t[ERROR]\t[20964]\t[/build/core/ilogtail.cpp:127]\tfile:file0\tlogNo:1199998\tmark:F\tmsg:这是一条消息", - "[%s]\t[WARNING]\t[32337]\t[/build/core/ilogtail.cpp:127]\tfile:file0\tlogNo:1199999\tmark:-\tmsg:hello world!", - "[%s]\t[INFO]\t[32337]\t[/build/core/ilogtail.cpp:127]\tfile:file0\tlogNo:1200000\tmark:-\tmsg:这是一条消息", - "[%s]\t[ERROR]\t[00001]\t[/build/core/ilogtail.cpp:127]\tfile:file0\tlogNo:1199992\tmark:-\tmsg:password:123456", - "[%s]\t[DEBUG]\t[32337]\t[/build/core/ilogtail.cpp:127]\tfile:file0\tlogNo:1199993\tmark:-\tmsg:hello world!", - } - file, err := os.OpenFile(fmt.Sprintf("%s/%s", gneratedLogDir, fileName), os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644) + testLogContentTmpl := string2Template([]string{ + "[{{.Time}}]\t[{{.Level}}]\t[32337]\t[/build/core/application/Application:12]\tfile:file{{.FileNo}}\tlogNo:{{.LogNo}}\tmark:{{.Mark}}\tmsg:hello world!\n", + "[{{.Time}}]\t[{{.Level}}]\t[20964]\t[/build/core/ilogtail.cpp:127]\tfile:file{{.FileNo}}\tlogNo:{{.LogNo}}\tmark:{{.Mark}}\tmsg:这是一条消息\n", + "[{{.Time}}]\t[{{.Level}}]\t[32337]\t[/build/core/ilogtail.cpp:127]\tfile:file{{.FileNo}}\tlogNo:{{.LogNo}}\tmark:{{.Mark}}\tmsg:hello world!\n", + "[{{.Time}}]\t[{{.Level}}]\t[32337]\t[/build/core/ilogtail.cpp:127]\tfile:file{{.FileNo}}\tlogNo:{{.LogNo}}\tmark:{{.Mark}}\tmsg:这是一条消息\n", + "[{{.Time}}]\t[{{.Level}}]\t[00001]\t[/build/core/ilogtail.cpp:127]\tfile:file{{.FileNo}}\tlogNo:{{.LogNo}}\tmark:{{.Mark}}\tmsg:password:123456\n", + }) + file, err := os.OpenFile(fmt.Sprintf("%s/%s", config.GeneratedLogDir, config.FileName), os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644) if err != nil { t.Fatalf("open file failed: %v", err) return @@ -53,21 +44,29 @@ func TestGenerateApsara(t *testing.T) { defer file.Close() logIndex := 0 - for i := 0; i < totalLog; i++ { + logNo := rand.Intn(10000) + fileNo := rand.Intn(10000) + for i := 0; i < config.TotalLog; i++ { var currentTime string if i%2 == 0 { currentTime = time.Now().Format("2006-01-02 15:04:05.000000") } else { currentTime = strconv.FormatInt(time.Now().UnixNano()/1000, 10) } - _, err := io.WriteString(file, fmt.Sprintf(testLogConent[logIndex]+"\n", currentTime)) + err = testLogContentTmpl[logIndex].Execute(file, map[string]interface{}{ + "Time": currentTime, + "Level": getRandomLogLevel(), + "LogNo": logNo + i, + "FileNo": fileNo, + "Mark": getRandomMark(), + }) if err != nil { t.Fatalf("write log failed: %v", err) return } - time.Sleep(time.Duration(interval * int(time.Millisecond))) + time.Sleep(time.Duration(config.Interval * int(time.Millisecond))) logIndex++ - if logIndex >= len(testLogConent) { + if logIndex >= len(testLogContentTmpl) { logIndex = 0 } } diff --git a/test/engine/trigger/generator/delimiter_test.go b/test/engine/trigger/generator/delimiter_test.go index 7388636cd1..354b6dea8c 100644 --- 
a/test/engine/trigger/generator/delimiter_test.go +++ b/test/engine/trigger/generator/delimiter_test.go @@ -15,36 +15,88 @@ package generator import ( "fmt" - "io" + "math/rand" "os" - "strconv" "testing" "time" ) // TestGenerateDelimiterSingle will be executed in the environment being collected. func TestGenerateDelimiterSingle(t *testing.T) { - gneratedLogDir := getEnvOrDefault("GENERATED_LOG_DIR", "/tmp/ilogtail") - totalLog, err := strconv.Atoi(getEnvOrDefault("TOTAL_LOG", "100")) + config, err := getGenerateFileLogConfigFromEnv("Delimiter", "Quote") if err != nil { - t.Fatalf("parse TOTAL_LOG failed: %v", err) + t.Fatalf("get generate file log config from env failed: %v", err) return } - interval, err := strconv.Atoi(getEnvOrDefault("INTERVAL", "1")) + delimiter := config.Custom["Delimiter"] + if delimiter == "" { + delimiter = " " + } + quote := config.Custom["Quote"] + if quote == "" { + quote = "" + } + testLogContentTmpl := string2Template([]string{ + "{{.Quote}}{{.Mark}}{{.Quote}}{{.Delimiter}}{{.Quote}}file{{.FileNo}}{{.Quote}}{{.Delimiter}}{{.Quote}}{{.LogNo}}{{.Quote}}{{.Delimiter}}{{.Quote}}0.0.0.0{{.Quote}}{{.Delimiter}}{{.Quote}}{{.Time}}{{.Quote}}{{.Delimiter}}{{.Quote}}GET{{.Quote}}{{.Delimiter}}{{.Quote}}/index.html{{.Quote}}{{.Delimiter}}{{.Quote}}HTTP/2.0{{.Quote}}{{.Delimiter}}{{.Quote}}302{{.Quote}}{{.Delimiter}}{{.Quote}}628{{.Quote}}{{.Delimiter}}{{.Quote}}curl/7.10{{.Quote}}\n", + "{{.Quote}}{{.Mark}}{{.Quote}}{{.Delimiter}}{{.Quote}}file{{.FileNo}}{{.Quote}}{{.Delimiter}}{{.Quote}}{{.LogNo}}{{.Quote}}{{.Delimiter}}{{.Quote}}10.45.26.0{{.Quote}}{{.Delimiter}}{{.Quote}}{{.Time}}{{.Quote}}{{.Delimiter}}{{.Quote}}GET{{.Quote}}{{.Delimiter}}{{.Quote}}/{{.Quote}}{{.Delimiter}}{{.Quote}}HTTP/2.0{{.Quote}}{{.Delimiter}}{{.Quote}}302{{.Quote}}{{.Delimiter}}{{.Quote}}218{{.Quote}}{{.Delimiter}}{{.Quote}}go-sdk{{.Quote}}\n", + "{{.Quote}}{{.Mark}}{{.Quote}}{{.Delimiter}}{{.Quote}}file{{.FileNo}}{{.Quote}}{{.Delimiter}}{{.Quote}}{{.LogNo}}{{.Quote}}{{.Delimiter}}{{.Quote}}10.45.26.0{{.Quote}}{{.Delimiter}}{{.Quote}}{{.Time}}{{.Quote}}{{.Delimiter}}{{.Quote}}GET{{.Quote}}{{.Delimiter}}{{.Quote}}/dir/resource.txt{{.Quote}}{{.Delimiter}}{{.Quote}}HTTP/1.1{{.Quote}}{{.Delimiter}}{{.Quote}}404{{.Quote}}{{.Delimiter}}{{.Quote}}744{{.Quote}}{{.Delimiter}}{{.Quote}}Mozilla/5.0{{.Quote}}\n", + "{{.Quote}}{{.Mark}}{{.Quote}}{{.Delimiter}}{{.Quote}}file{{.FileNo}}{{.Quote}}{{.Delimiter}}{{.Quote}}{{.LogNo}}{{.Quote}}{{.Delimiter}}{{.Quote}}127.0.0.1{{.Quote}}{{.Delimiter}}{{.Quote}}{{.Time}}{{.Quote}}{{.Delimiter}}{{.Quote}}PUT{{.Quote}}{{.Delimiter}}{{.Quote}}/{{.Quote}}{{.Delimiter}}{{.Quote}}HTTP/2.0{{.Quote}}{{.Delimiter}}{{.Quote}}200{{.Quote}}{{.Delimiter}}{{.Quote}}320{{.Quote}}{{.Delimiter}}{{.Quote}}curl/7.10{{.Quote}}\n", + "{{.Quote}}{{.Mark}}{{.Quote}}{{.Delimiter}}{{.Quote}}file{{.FileNo}}{{.Quote}}{{.Delimiter}}{{.Quote}}{{.LogNo}}{{.Quote}}{{.Delimiter}}{{.Quote}}192.168.0.3{{.Quote}}{{.Delimiter}}{{.Quote}}{{.Time}}{{.Quote}}{{.Delimiter}}{{.Quote}}PUT{{.Quote}}{{.Delimiter}}{{.Quote}}/dir/resource.txt{{.Quote}}{{.Delimiter}}{{.Quote}}HTTP/1.1{{.Quote}}{{.Delimiter}}{{.Quote}}404{{.Quote}}{{.Delimiter}}{{.Quote}}949{{.Quote}}{{.Delimiter}}{{.Quote}}curl/7.10{{.Quote}}\n", + }) + file, err := os.OpenFile(fmt.Sprintf("%s/%s", config.GeneratedLogDir, config.FileName), os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644) if err != nil { - t.Fatalf("parse INTERVAL failed: %v", err) + t.Fatalf("open file failed: %v", err) return } - fileName := getEnvOrDefault("FILENAME", "apsara.log") 
+ defer file.Close() - testLogConent := []string{ - "'-' 'file0' '13196' '0.0.0.0' '%s' 'GET' '/index.html' 'HTTP/2.0' '302' '628' 'curl/7.10'", - "'-' 'file0' '13197' '10.45.26.0' '%s' 'GET' '/' 'HTTP/2.0' '302' '218' 'go-sdk'", - "'-' 'file0' '13198' '10.45.26.0' '%s' 'GET' '/dir/resource.txt' 'HTTP/1.1' '404' '744' 'Mozilla/5.0'", - "'-' 'file0' '13199' '127.0.0.1' '%s' 'PUT' '/' 'HTTP/2.0' '200' '320' 'curl/7.10'", - "'-' 'file0' '13200' '192.168.0.3' '%s' 'PUT' '/dir/resource.txt' 'HTTP/1.1' '404' '949' 'curl/7.10'", + logIndex := 0 + logNo := rand.Intn(10000) + fileNo := rand.Intn(10000) + for i := 0; i < config.TotalLog; i++ { + err = testLogContentTmpl[logIndex].Execute(file, map[string]interface{}{ + "Mark": getRandomMark(), + "FileNo": fileNo, + "LogNo": logNo, + "Time": time.Now().Format("2006-01-02 15:04:05.000000000"), + "Delimiter": delimiter, + "Quote": quote, + }) + if err != nil { + t.Fatalf("write log failed: %v", err) + return + } + time.Sleep(time.Duration(config.Interval * int(time.Millisecond))) + logIndex++ + if logIndex >= len(testLogContentTmpl) { + logIndex = 0 + } + } +} + +// TestGenerateDelimiterMultiline will be executed in the environment being collected. +func TestGenerateDelimiterMultiline(t *testing.T) { + config, err := getGenerateFileLogConfigFromEnv("Delimiter", "Quote") + if err != nil { + t.Fatalf("get generate file log config from env failed: %v", err) + return + } + delimiter := config.Custom["Delimiter"] + if delimiter == "" { + delimiter = " " + } + quote := config.Custom["Quote"] + if quote == "" { + quote = "" } - file, err := os.OpenFile(fmt.Sprintf("%s/%s", gneratedLogDir, fileName), os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644) + testLogContentTmpl := string2Template([]string{ + "{{.Quote}}F{{.Quote}}{{.Delimiter}}{{.Quote}}file{{.FileNo}}{{.Quote}}{{.Delimiter}}{{.Quote}}{{.LogNo}}{{.Quote}}{{.Delimiter}}{{.Quote}}0.0.0.0{{.Quote}}{{.Delimiter}}{{.Quote}}{{.Time}}{{.Quote}}{{.Delimiter}}{{.Quote}}GET{{.Quote}}{{.Delimiter}}{{.Quote}}/index.html{{.Quote}}{{.Delimiter}}{{.Quote}}\nHTTP\n/2.0{{.Quote}}{{.Delimiter}}{{.Quote}}302{{.Quote}}{{.Delimiter}}{{.Quote}}628{{.Quote}}{{.Delimiter}}{{.Quote}}curl/7.10{{.Quote}}\n", + "{{.Quote}}-{{.Quote}}{{.Delimiter}}{{.Quote}}file{{.FileNo}}{{.Quote}}{{.Delimiter}}{{.Quote}}{{.LogNo}}{{.Quote}}{{.Delimiter}}{{.Quote}}10.45.26.0{{.Quote}}{{.Delimiter}}{{.Quote}}{{.Time}}{{.Quote}}{{.Delimiter}}{{.Quote}}GET{{.Quote}}{{.Delimiter}}{{.Quote}}/{{.Quote}}{{.Delimiter}}{{.Quote}}\nHTTP\n/2.0{{.Quote}}{{.Delimiter}}{{.Quote}}302{{.Quote}}{{.Delimiter}}{{.Quote}}218{{.Quote}}{{.Delimiter}}{{.Quote}}go-sdk{{.Quote}}\n", + "{{.Quote}}F{{.Quote}}{{.Delimiter}}{{.Quote}}file{{.FileNo}}{{.Quote}}{{.Delimiter}}{{.Quote}}{{.LogNo}}{{.Quote}}{{.Delimiter}}{{.Quote}}10.45.26.0{{.Quote}}{{.Delimiter}}{{.Quote}}{{.Time}}{{.Quote}}{{.Delimiter}}{{.Quote}}GET{{.Quote}}{{.Delimiter}}{{.Quote}}/dir/resource.txt{{.Quote}}{{.Delimiter}}{{.Quote}}\nHTTP\n/1.1{{.Quote}}{{.Delimiter}}{{.Quote}}404{{.Quote}}{{.Delimiter}}{{.Quote}}744{{.Quote}}{{.Delimiter}}{{.Quote}}Mozilla/5.0{{.Quote}}\n", + "{{.Quote}}-{{.Quote}}{{.Delimiter}}{{.Quote}}file{{.FileNo}}{{.Quote}}{{.Delimiter}}{{.Quote}}{{.LogNo}}{{.Quote}}{{.Delimiter}}{{.Quote}}127.0.0.1{{.Quote}}{{.Delimiter}}{{.Quote}}{{.Time}}{{.Quote}}{{.Delimiter}}{{.Quote}}PUT{{.Quote}}{{.Delimiter}}{{.Quote}}/{{.Quote}}{{.Delimiter}}{{.Quote}}\nHTTP\n/2.0{{.Quote}}{{.Delimiter}}{{.Quote}}200{{.Quote}}{{.Delimiter}}{{.Quote}}320{{.Quote}}{{.Delimiter}}{{.Quote}}curl/7.10{{.Quote}}\n", + 
"{{.Quote}}F{{.Quote}}{{.Delimiter}}{{.Quote}}file{{.FileNo}}{{.Quote}}{{.Delimiter}}{{.Quote}}{{.LogNo}}{{.Quote}}{{.Delimiter}}{{.Quote}}192.168.0.3{{.Quote}}{{.Delimiter}}{{.Quote}}{{.Time}}{{.Quote}}{{.Delimiter}}{{.Quote}}PUT{{.Quote}}{{.Delimiter}}{{.Quote}}/dir/resource.txt{{.Quote}}{{.Delimiter}}{{.Quote}}\nHTTP\n/1.1{{.Quote}}{{.Delimiter}}{{.Quote}}404{{.Quote}}{{.Delimiter}}{{.Quote}}949{{.Quote}}{{.Delimiter}}{{.Quote}}curl/7.10{{.Quote}}\n", + }) + file, err := os.OpenFile(fmt.Sprintf("%s/%s", config.GeneratedLogDir, config.FileName), os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644) if err != nil { t.Fatalf("open file failed: %v", err) return @@ -52,16 +104,23 @@ func TestGenerateDelimiterSingle(t *testing.T) { defer file.Close() logIndex := 0 - for i := 0; i < totalLog; i++ { - currentTime := time.Now().Format("2006-01-02 15:04:05.000000000") - _, err := io.WriteString(file, fmt.Sprintf(testLogConent[logIndex]+"\n", currentTime)) + logNo := rand.Intn(10000) + fileNo := rand.Intn(10000) + for i := 0; i < config.TotalLog; i++ { + err = testLogContentTmpl[logIndex].Execute(file, map[string]interface{}{ + "FileNo": fileNo, + "LogNo": logNo, + "Time": time.Now().Format("2006-01-02 15:04:05.000000000"), + "Delimiter": delimiter, + "Quote": quote, + }) if err != nil { t.Fatalf("write log failed: %v", err) return } - time.Sleep(time.Duration(interval * int(time.Millisecond))) + time.Sleep(time.Duration(config.Interval * int(time.Millisecond))) logIndex++ - if logIndex >= len(testLogConent) { + if logIndex >= len(testLogContentTmpl) { logIndex = 0 } } diff --git a/test/engine/trigger/generator/ebpf_file_mmap_test.go b/test/engine/trigger/generator/ebpf_file_mmap_test.go index 618b9af1b4..56e221b54d 100644 --- a/test/engine/trigger/generator/ebpf_file_mmap_test.go +++ b/test/engine/trigger/generator/ebpf_file_mmap_test.go @@ -27,7 +27,7 @@ func TestGenerateMmapCommand(t *testing.T) { t.Fatalf("parse COMMAND_CNT failed: %v", err) return } - filename := getEnvOrDefault("FILE_NAME", "/tmp/ilogtail/ebpfFileSecurityHook3.log") + filename := getEnvOrDefault("FILE_NAME", "/tmp/loongcollector/ebpfFileSecurityHook3.log") f, err := os.Create(filename) if err != nil { panic(err) diff --git a/test/engine/trigger/generator/helper.go b/test/engine/trigger/generator/helper.go index ed80e86727..2514bb2614 100644 --- a/test/engine/trigger/generator/helper.go +++ b/test/engine/trigger/generator/helper.go @@ -14,9 +14,69 @@ package generator import ( + "crypto/rand" + "fmt" + "math/big" "os" + "strconv" + "text/template" + + "github.com/pkg/errors" ) +var Levels = []string{"ERROR", "INFO", "DEBUG", "WARNING"} + +type GenerateFileLogConfig struct { + GeneratedLogDir string + TotalLog int + Interval int + FileName string + Custom map[string]string +} + +func getGenerateFileLogConfigFromEnv(customKeys ...string) (*GenerateFileLogConfig, error) { + gneratedLogDir := getEnvOrDefault("GENERATED_LOG_DIR", "/tmp/loongcollector") + totalLog, err := strconv.Atoi(getEnvOrDefault("TOTAL_LOG", "100")) + if err != nil { + return nil, errors.Wrap(err, "parse TOTAL_LOG failed") + } + interval, err := strconv.Atoi(getEnvOrDefault("INTERVAL", "1")) + if err != nil { + return nil, errors.Wrap(err, "parse INTERVAL failed") + } + fileName := getEnvOrDefault("FILENAME", "default.log") + custom := make(map[string]string) + for _, key := range customKeys { + custom[key] = getEnvOrDefault(key, "") + } + return &GenerateFileLogConfig{ + GeneratedLogDir: gneratedLogDir, + TotalLog: totalLog, + Interval: interval, + FileName: fileName, + 
Custom: custom, + }, nil +} + +func string2Template(strings []string) []*template.Template { + templates := make([]*template.Template, len(strings)) + for i, str := range strings { + templates[i], _ = template.New(fmt.Sprintf("template_%d", i)).Parse(str) + } + return templates +} + +func getRandomLogLevel() string { + randInt, _ := rand.Int(rand.Reader, big.NewInt(int64(len(Levels)))) + return Levels[randInt.Int64()] +} + +func getRandomMark() string { + marks := []string{"-", "F"} + randInt, _ := rand.Int(rand.Reader, big.NewInt(int64(len(marks)))) + return marks[randInt.Int64()] +} + func getEnvOrDefault(env, fallback string) string { if value, ok := os.LookupEnv(env); ok { return value diff --git a/test/engine/trigger/generator/json_test.go b/test/engine/trigger/generator/json_test.go new file mode 100644 index 0000000000..0b4ef51b33 --- /dev/null +++ b/test/engine/trigger/generator/json_test.go @@ -0,0 +1,119 @@ +// Copyright 2024 iLogtail Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +package generator + +import ( + "fmt" + "math/rand" + "os" + "strconv" + "testing" + "time" +) + +// TestGenerateJSONSingle will be executed in the environment being collected. +func TestGenerateJSONSingle(t *testing.T) { + config, err := getGenerateFileLogConfigFromEnv() + if err != nil { + t.Fatalf("get generate file log config from env failed: %v", err) + return + } + testLogContentTmpl := string2Template([]string{ + `{"mark":"{{.Mark}}","file":"file{{.FileNo}}","logNo":{{.LogNo}},"time":"{{.Time}}","ip":"0.0.0.0","method":"POST","userAgent":"mozilla firefox","size":263} +`, + `{"mark":"{{.Mark}}","file":"file{{.FileNo}}","logNo":{{.LogNo}},"time":"{{.Time}}","ip":"0.0.0.0","method":"GET","userAgent":"go-sdk","size":569} +`, + `{"mark":"{{.Mark}}","file":"file{{.FileNo}}","logNo":{{.LogNo}},"time":"{{.Time}}","ip":"0.0.0.0","method":"HEAD","userAgent":"go-sdk","size":210} +`, + `{"mark":"{{.Mark}}","file":"file{{.FileNo}}","logNo":{{.LogNo}},"time":"{{.Time}}","ip":"192.168.0.3","method":"PUT","userAgent":"curl/7.10","size":267} +`, + }) + file, err := os.OpenFile(fmt.Sprintf("%s/%s", config.GeneratedLogDir, config.FileName), os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644) + if err != nil { + t.Fatalf("open file failed: %v", err) + return + } + defer file.Close() + + logIndex := 0 + logNo := rand.Intn(10000) + fileNo := rand.Intn(10000) + for i := 0; i < config.TotalLog; i++ { + var currentTime string + if i%2 == 0 { + currentTime = time.Now().Format("2006-01-02T15:04:05.999999999") + } else { + currentTime = strconv.FormatInt(time.Now().UnixNano()/1000, 10) + } + testLogContentTmpl[logIndex].Execute(file, map[string]interface{}{ + "Mark": getRandomMark(), + "FileNo": fileNo, + "LogNo": logNo + i, + "Time": currentTime, + }) + + time.Sleep(time.Duration(config.Interval * int(time.Millisecond))) + logIndex++ + if logIndex >= len(testLogContentTmpl) { + logIndex = 0 + } + } +} + +func TestGenerateJSONMultiline(t *testing.T) { + config, err := getGenerateFileLogConfigFromEnv() + if err != 
nil { + t.Fatalf("get generate file log config from env failed: %v", err) + return + } + testLogContentTmpl := string2Template([]string{ + `{"mark":"{{.Mark}}","file":"file{{.FileNo}}","logNo":{{.LogNo}},"time":"{{.Time}}","ip":"0.0.0.0","method":"POST","userAgent":"mozilla firefox", +"size":263} +`, + `{"mark":"{{.Mark}}","file":"file{{.FileNo}}","logNo":{{.LogNo}},"time":"{{.Time}}","ip":"0.0.0.0","method":"GET","userAgent":"go-sdk", +"size":569} +`, + `{"mark":"{{.Mark}}","file":"file{{.FileNo}}","logNo":{{.LogNo}},"time":"{{.Time}}","ip":"0.0.0.0","method":"HEAD","userAgent":"go-sdk", +"size":210} +`, + `{"mark":"{{.Mark}}","file":"file{{.FileNo}}","logNo":{{.LogNo}},"time":"{{.Time}}","ip":"192.168.0.3","method":"PUT","userAgent":"curl/7.10", +"size":267} +`, + }) + file, err := os.OpenFile(fmt.Sprintf("%s/%s", config.GeneratedLogDir, config.FileName), os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644) + if err != nil { + t.Fatalf("open file failed: %v", err) + return + } + defer file.Close() + + logIndex := 0 + logNo := rand.Intn(10000) + fileNo := rand.Intn(10000) + for i := 0; i < config.TotalLog; i++ { + currentTime := time.Now().Format("2006-01-02T15:04:05.999999999") + testLogContentTmpl[logIndex].Execute(file, map[string]interface{}{ + "Mark": getRandomMark(), + "FileNo": fileNo, + "LogNo": logNo + i, + "Time": currentTime, + }) + + time.Sleep(time.Duration(config.Interval * int(time.Millisecond))) + logIndex++ + if logIndex >= len(testLogContentTmpl) { + logIndex = 0 + } + } +} diff --git a/test/engine/trigger/generator/regex_test.go b/test/engine/trigger/generator/regex_test.go index b5950f3307..c50e65dc6e 100644 --- a/test/engine/trigger/generator/regex_test.go +++ b/test/engine/trigger/generator/regex_test.go @@ -17,8 +17,8 @@ import ( "bytes" "fmt" "io" + "math/rand" "os" - "strconv" "testing" "time" @@ -28,26 +28,22 @@ import ( // TestGenerateRegexLogSingle will be executed in the environment being collected. 
func TestGenerateRegexLogSingle(t *testing.T) { - gneratedLogDir := getEnvOrDefault("GENERATED_LOG_DIR", "/tmp/ilogtail") - totalLog, err := strconv.Atoi(getEnvOrDefault("TOTAL_LOG", "100")) + config, err := getGenerateFileLogConfigFromEnv() if err != nil { - t.Fatalf("parse TOTAL_LOG failed: %v", err) + t.Fatalf("get config failed: %v", err) return } - interval, err := strconv.Atoi(getEnvOrDefault("INTERVAL", "1")) - if err != nil { - t.Fatalf("parse INTERVAL failed: %v", err) - return - } - fileName := getEnvOrDefault("FILENAME", "regex_single.log") - - testLogConent := []string{ - `- file2:1 127.0.0.1 - [2024-01-07T12:40:10.505120] "HEAD / HTTP/2.0" 302 809 "未知" "这是一条消息,password:123456"`, - `- file2:2 127.0.0.1 - [2024-01-07T12:40:11.392101] "GET /index.html HTTP/2.0" 200 139 "Mozilla/5.0" "这是一条消息,password:123456,这是第二条消息,password:00000"`, - `- file2:3 10.45.26.0 - [2024-01-07T12:40:12.359314] "PUT /index.html HTTP/1.1" 200 913 "curl/7.10" "这是一条消息"`, - `- file2:4 192.168.0.3 - [2024-01-07T12:40:13.002661] "PUT /dir/resource.txt HTTP/2.0" 501 355 "go-sdk" "这是一条消息,password:123456"`, - } - file, err := os.OpenFile(fmt.Sprintf("%s/%s", gneratedLogDir, fileName), os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644) + testLogContentTmpl := string2Template([]string{ + `{{.Mark}} file{{.FileNo}}:{{.LogNo}} 127.0.0.1 - [{{.Time}}] "HEAD / HTTP/2.0" 302 809 "未知" "这是一条消息,password:123456" +`, + `{{.Mark}} file{{.FileNo}}:{{.LogNo}} 127.0.0.1 - [{{.Time}}] "GET /index.html HTTP/2.0" 200 139 "Mozilla/5.0" "这是一条消息,password:123456,这是第二条消息,password:00000" +`, + `{{.Mark}} file{{.FileNo}}:{{.LogNo}} 10.45.26.0 - [{{.Time}}] "PUT /index.html HTTP/1.1" 200 913 "curl/7.10" "这是一条消息" +`, + `{{.Mark}} file{{.FileNo}}:{{.LogNo}} 192.168.0.3 - [{{.Time}}] "PUT /dir/resource.txt HTTP/2.0" 501 355 "go-sdk" "这是一条消息,password:123456" +`, + }) + file, err := os.OpenFile(fmt.Sprintf("%s/%s", config.GeneratedLogDir, config.FileName), os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644) if err != nil { t.Fatalf("open file failed: %v", err) return @@ -55,15 +51,27 @@ func TestGenerateRegexLogSingle(t *testing.T) { defer file.Close() logIndex := 0 - for i := 0; i < totalLog; i++ { - _, err := io.WriteString(file, testLogConent[logIndex]+"\n") + logNo := rand.Intn(10000) + fileNo := rand.Intn(10000) + location, err := time.LoadLocation("Asia/Shanghai") + if err != nil { + t.Fatalf("load location failed: %v", err) + return + } + for i := 0; i < config.TotalLog; i++ { + err = testLogContentTmpl[logIndex].Execute(file, map[string]interface{}{ + "Time": time.Now().In(location).Format("2006-01-02T15:04:05.000000"), + "Mark": getRandomMark(), + "FileNo": fileNo, + "LogNo": logNo + i, + }) if err != nil { t.Fatalf("write log failed: %v", err) return } - time.Sleep(time.Duration(interval * int(time.Millisecond))) + time.Sleep(time.Duration(config.Interval * int(time.Millisecond))) logIndex++ - if logIndex >= len(testLogConent) { + if logIndex >= len(testLogContentTmpl) { logIndex = 0 } } @@ -71,51 +79,119 @@ func TestGenerateRegexLogSingle(t *testing.T) { // TestGenerateRegexLogSingleGBK will be executed in the environment being collected. 
func TestGenerateRegexLogSingleGBK(t *testing.T) { - gneratedLogDir := getEnvOrDefault("GENERATED_LOG_DIR", "/tmp/ilogtail") - totalLog, err := strconv.Atoi(getEnvOrDefault("TOTAL_LOG", "100")) + config, err := getGenerateFileLogConfigFromEnv() if err != nil { - t.Fatalf("parse TOTAL_LOG failed: %v", err) + t.Fatalf("get config failed: %v", err) return } - interval, err := strconv.Atoi(getEnvOrDefault("INTERVAL", "1")) + encoder := simplifiedchinese.GBK.NewEncoder() + testLogContentTmpl := string2Template([]string{ + `{{.Mark}} file{{.FileNo}}:{{.LogNo}} 127.0.0.1 - [{{.Time}}] "HEAD / HTTP/2.0" 302 809 "未知" "这是一条消息,password:123456" +`, + `{{.Mark}} file{{.FileNo}}:{{.LogNo}} 127.0.0.1 - [{{.Time}}] "GET /index.html HTTP/2.0" 200 139 "Mozilla/5.0" "这是一条消息,password:123456,这是第二条消息,password:00000" +`, + `{{.Mark}} file{{.FileNo}}:{{.LogNo}} 10.45.26.0 - [{{.Time}}] "PUT /index.html HTTP/1.1" 200 913 "curl/7.10" "这是一条消息" +`, + `{{.Mark}} file{{.FileNo}}:{{.LogNo}} 192.168.0.3 - [{{.Time}}] "PUT /dir/resource.txt HTTP/2.0" 501 355 "go-sdk" "这是一条消息,password:123456" +`, + }) + file, err := os.OpenFile(fmt.Sprintf("%s/%s", config.GeneratedLogDir, config.FileName), os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644) if err != nil { - t.Fatalf("parse INTERVAL failed: %v", err) + t.Fatalf("open file failed: %v", err) return } - fileName := getEnvOrDefault("FILENAME", "regex_single.log") + defer file.Close() - encoder := simplifiedchinese.GBK.NewEncoder() - testLogConentUTF8 := []string{ - `- file2:1 127.0.0.1 - [2024-01-07T12:40:10.505120] "HEAD / HTTP/2.0" 302 809 "未知" "这是一条消息,password:123456"`, - `- file2:2 127.0.0.1 - [2024-01-07T12:40:11.392101] "GET /index.html HTTP/2.0" 200 139 "Mozilla/5.0" "这是一条消息,password:123456,这是第二条消息,password:00000"`, - `- file2:3 10.45.26.0 - [2024-01-07T12:40:12.359314] "PUT /index.html HTTP/1.1" 200 913 "curl/7.10" "这是一条消息"`, - `- file2:4 192.168.0.3 - [2024-01-07T12:40:13.002661] "PUT /dir/resource.txt HTTP/2.0" 501 355 "go-sdk" "这是一条消息,password:123456"`, + logIndex := 0 + logNo := rand.Intn(10000) + fileNo := rand.Intn(10000) + location, err := time.LoadLocation("Asia/Shanghai") + if err != nil { + t.Fatalf("load location failed: %v", err) + return } - testLogConent := make([]string, 0, len(testLogConentUTF8)) - for _, log := range testLogConentUTF8 { - data, err1 := io.ReadAll(transform.NewReader(bytes.NewBuffer([]byte(log)), encoder)) + for i := 0; i < config.TotalLog; i++ { + var buffer bytes.Buffer + _ = testLogContentTmpl[logIndex].Execute(&buffer, map[string]interface{}{ + "Time": time.Now().In(location).Format("2006-01-02T15:04:05.000000"), + "Mark": getRandomMark(), + "FileNo": fileNo, + "LogNo": logNo + i, + }) + data, err1 := io.ReadAll(transform.NewReader(&buffer, encoder)) if err1 != nil { t.Fatalf("encode log failed: %v", err1) } - testLogConent = append(testLogConent, string(data)) + _, err := io.WriteString(file, string(data)) + if err != nil { + t.Fatalf("write log failed: %v", err) + return + } + time.Sleep(time.Duration(config.Interval * int(time.Millisecond))) + logIndex++ + if logIndex >= len(testLogContentTmpl) { + logIndex = 0 + } } - file, err := os.OpenFile(fmt.Sprintf("%s/%s", gneratedLogDir, fileName), os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644) +} + +func TestGenerateRegexLogMultiline(t *testing.T) { + config, err := getGenerateFileLogConfigFromEnv() + if err != nil { + t.Fatalf("get config failed: %v", err) + return + } + testLogContentTmpl := string2Template([]string{ + `{{.Mark}} file{{.FileNo}}:{{.LogNo}} [{{.Time}}] [{{.Level}}] 
java.lang.Exception: exception happened +at com.aliyun.sls.devops.logGenerator.type.RegexMultiLog.f1(RegexMultiLog.java:73) +at com.aliyun.sls.devops.logGenerator.type.RegexMultiLog.run(RegexMultiLog.java:34) +at java.base/java.lang.Thread.run(Thread.java:833) +`, + `{{.Mark}} file{{.FileNo}}:{{.LogNo}} [{{.Time}}] [{{.Level}}] java.lang.Exception: 发生异常 +at com.aliyun.sls.devops.logGenerator.type.RegexMultiLog.f2(RegexMultiLog.java:80) +at com.aliyun.sls.devops.logGenerator.type.RegexMultiLog.f1(RegexMultiLog.java:75) +at com.aliyun.sls.devops.logGenerator.type.RegexMultiLog.run(RegexMultiLog.java:34) +at java.base/java.lang.Thread.run(Thread.java:833) +`, + `{{.Mark}} file{{.FileNo}}:{{.LogNo}} [{{.Time}}] [{{.Level}}] java.lang.Exception: exception happened +at com.aliyun.sls.devops.logGenerator.type.RegexMultiLog.f5(RegexMultiLog.java:100) +at com.aliyun.sls.devops.logGenerator.type.RegexMultiLog.f4(RegexMultiLog.java:96) +at com.aliyun.sls.devops.logGenerator.type.RegexMultiLog.f3(RegexMultiLog.java:89) +at com.aliyun.sls.devops.logGenerator.type.RegexMultiLog.f2(RegexMultiLog.java:82) +at com.aliyun.sls.devops.logGenerator.type.RegexMultiLog.f1(RegexMultiLog.java:75) +at com.aliyun.sls.devops.logGenerator.type.RegexMultiLog.run(RegexMultiLog.java:34) +at java.base/java.lang.Thread.run(Thread.java:833) +`, + }) + file, err := os.OpenFile(fmt.Sprintf("%s/%s", config.GeneratedLogDir, config.FileName), os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644) if err != nil { t.Fatalf("open file failed: %v", err) return } defer file.Close() - logIndex := 0 - for i := 0; i < totalLog; i++ { - _, err := io.WriteString(file, testLogConent[logIndex]+"\n") + logNo := rand.Intn(10000) + fileNo := rand.Intn(10000) + location, err := time.LoadLocation("Asia/Shanghai") + if err != nil { + t.Fatalf("load location failed: %v", err) + return + } + for i := 0; i < config.TotalLog; i++ { + err = testLogContentTmpl[logIndex].Execute(file, map[string]interface{}{ + "Time": time.Now().In(location).Format("2006-01-02T15:04:05.000000"), + "Level": getRandomLogLevel(), + "FileNo": fileNo, + "LogNo": logNo + i, + "Mark": getRandomMark(), + }) if err != nil { t.Fatalf("write log failed: %v", err) return } - time.Sleep(time.Duration(interval * int(time.Millisecond))) + time.Sleep(time.Duration(config.Interval * int(time.Millisecond))) logIndex++ - if logIndex >= len(testLogConent) { + if logIndex >= len(testLogContentTmpl) { logIndex = 0 } } diff --git a/test/engine/trigger/trigger.go b/test/engine/trigger/trigger.go index 75b038e823..63653c5cb6 100644 --- a/test/engine/trigger/trigger.go +++ b/test/engine/trigger/trigger.go @@ -15,15 +15,15 @@ package trigger import ( "context" - "html/template" "strings" + "text/template" "time" "github.com/alibaba/ilogtail/test/config" "github.com/alibaba/ilogtail/test/engine/setup" ) -const triggerRegexTemplate = "cd {{.WorkDir}} && TOTAL_LOG={{.TotalLog}} INTERVAL={{.Interval}} FILENAME={{.Filename}} GENERATED_LOG_DIR={{.GeneratedLogDir}} {{.Command}}" +const triggerTemplate = "cd {{.WorkDir}} && TOTAL_LOG={{.TotalLog}} INTERVAL={{.Interval}} FILENAME={{.Filename}} GENERATED_LOG_DIR={{.GeneratedLogDir}} {{.Custom}} {{.Command}}" func RegexSingle(ctx context.Context, totalLog int, path string, interval int) (context.Context, error) { return generate(ctx, totalLog, path, interval, "TestGenerateRegexLogSingle") @@ -33,37 +33,61 @@ func RegexSingleGBK(ctx context.Context, totalLog int, path string, interval int return generate(ctx, totalLog, path, interval, "TestGenerateRegexLogSingleGBK") } 
+func RegexMultiline(ctx context.Context, totalLog int, path string, interval int) (context.Context, error) { + return generate(ctx, totalLog, path, interval, "TestGenerateRegexLogMultiline") +} + +func JSONSingle(ctx context.Context, totalLog int, path string, interval int) (context.Context, error) { + return generate(ctx, totalLog, path, interval, "TestGenerateJSONSingle") +} + +func JSONMultiline(ctx context.Context, totalLog int, path string, interval int) (context.Context, error) { + return generate(ctx, totalLog, path, interval, "TestGenerateJSONMultiline") +} + func Apsara(ctx context.Context, totalLog int, path string, interval int) (context.Context, error) { return generate(ctx, totalLog, path, interval, "TestGenerateApsara") } -func DelimiterSingle(ctx context.Context, totalLog int, path string, interval int) (context.Context, error) { - return generate(ctx, totalLog, path, interval, "TestGenerateDelimiterSingle") +func DelimiterSingle(ctx context.Context, totalLog int, path string, interval int, delimiter, quote string) (context.Context, error) { + return generate(ctx, totalLog, path, interval, "TestGenerateDelimiterSingle", "Delimiter", delimiter, "Quote", quote) } -func generate(ctx context.Context, totalLog int, path string, interval int, commandName string) (context.Context, error) { +func DelimiterMultiline(ctx context.Context, totalLog int, path string, interval int, delimiter, quote string) (context.Context, error) { + return generate(ctx, totalLog, path, interval, "TestGenerateDelimiterMultiline", "Delimiter", delimiter, "Quote", quote) +} + +func generate(ctx context.Context, totalLog int, path string, interval int, commandName string, customKV ...string) (context.Context, error) { time.Sleep(3 * time.Second) command := getRunTriggerCommand(commandName) - var triggerRegexCommand strings.Builder - template := template.Must(template.New("trigger").Parse(triggerRegexTemplate)) + var triggerCommand strings.Builder + template := template.Must(template.New("trigger").Parse(triggerTemplate)) splittedPath := strings.Split(path, "/") dir := strings.Join(splittedPath[:len(splittedPath)-1], "/") filename := splittedPath[len(splittedPath)-1] - if err := template.Execute(&triggerRegexCommand, map[string]interface{}{ + customString := strings.Builder{} + for i := 0; i < len(customKV); i++ { + customString.WriteString(customKV[i]) + customString.WriteString("=") + customString.WriteString(customKV[i+1]) + customString.WriteString(" ") + i++ + } + if err := template.Execute(&triggerCommand, map[string]interface{}{ "WorkDir": config.TestConfig.WorkDir, "TotalLog": totalLog, "Interval": interval, "GeneratedLogDir": dir, "Filename": filename, + "Custom": customString.String(), "Command": command, }); err != nil { return ctx, err } - startTime := time.Now().Unix() - if err := setup.Env.ExecOnSource(ctx, triggerRegexCommand.String()); err != nil { + if _, err := setup.Env.ExecOnSource(ctx, triggerCommand.String()); err != nil { return ctx, err } - return context.WithValue(ctx, config.StartTimeContextKey, int32(startTime)), nil + return ctx, nil } func BeginTrigger(ctx context.Context) (context.Context, error) { diff --git a/test/engine/verify/agent.go b/test/engine/verify/agent.go new file mode 100644 index 0000000000..f8aac3233d --- /dev/null +++ b/test/engine/verify/agent.go @@ -0,0 +1,45 @@ +// Copyright 2024 iLogtail Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +package verify + +import ( + "context" + "fmt" + + "github.com/alibaba/ilogtail/test/config" + "github.com/alibaba/ilogtail/test/engine/setup" +) + +const ( + queryPIDCommand = "ps -e | grep loongcollector | grep -v grep | awk '{print $1}'" +) + +func AgentNotCrash(ctx context.Context) (context.Context, error) { + // verify agent crash + result, err := setup.Env.ExecOnLogtail(queryPIDCommand) + if err != nil { + if err.Error() == "not implemented" { + return ctx, nil + } + return ctx, err + } + agentPID := ctx.Value(config.AgentPIDKey) + if agentPID == nil { + return ctx, fmt.Errorf("agent PID not found in context") + } + if result != agentPID { + return ctx, fmt.Errorf("agent crash, expect PID: %s, but got: %s", agentPID, result) + } + return ctx, nil +} diff --git a/test/engine/verify/apsara.go b/test/engine/verify/apsara.go deleted file mode 100644 index 220cebd3bb..0000000000 --- a/test/engine/verify/apsara.go +++ /dev/null @@ -1,94 +0,0 @@ -// Copyright 2024 iLogtail Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package verify - -import ( - "context" - "fmt" - "strconv" - "time" - - "github.com/avast/retry-go/v4" - - "github.com/alibaba/ilogtail/pkg/protocol" - "github.com/alibaba/ilogtail/test/config" - "github.com/alibaba/ilogtail/test/engine/control" - "github.com/alibaba/ilogtail/test/engine/setup/subscriber" -) - -func Apsara(ctx context.Context) (context.Context, error) { - var from int32 - value := ctx.Value(config.StartTimeContextKey) - if value != nil { - from = value.(int32) - } else { - return ctx, fmt.Errorf("no start time") - } - fields := []string{"__FILE__", "__LEVEL__", "__LINE__", "__THREAD__", "file", "logNo", "mark", "microtime", "msg"} - timeoutCtx, cancel := context.WithTimeout(context.TODO(), config.TestConfig.RetryTimeout) - defer cancel() - var groups []*protocol.LogGroup - var err error - err = retry.Do( - func() error { - groups, err = subscriber.TestSubscriber.GetData(control.GetQuery(ctx), from) - if err != nil { - return err - } - for _, group := range groups { - for _, log := range group.Logs { - for _, field := range fields { - found := false - for _, content := range log.Contents { - if content.Key == field { - found = true - break - } - } - if !found { - return fmt.Errorf("field %s not found", field) - } - } - // validate time parse - var microtime int64 - var recordTime int64 - var nanoTime int64 - for _, content := range log.Contents { - if content.Key == "microtime" { - microtime, _ = strconv.ParseInt(content.Value, 10, 64) - } - if content.Key == "__time__" { - recordTime, _ = strconv.ParseInt(content.Value, 10, 64) - } - if content.Key == "__time_ns_part__" { - nanoTime, _ = strconv.ParseInt(content.Value, 10, 64) - } - } - if microtime != recordTime*1000000+nanoTime/1000 { - return fmt.Errorf("time parse error, microtime: %d, recordtime: %d, nanotime: %d", microtime, recordTime, nanoTime) - } - } - } - return err - }, - retry.Context(timeoutCtx), - retry.Delay(5*time.Second), - retry.DelayType(retry.FixedDelay), - ) - if err != nil { - return ctx, err - } - return ctx, nil -} diff --git a/test/engine/verify/count.go b/test/engine/verify/count.go index 53082ff184..0c87246712 100644 --- a/test/engine/verify/count.go +++ b/test/engine/verify/count.go @@ -71,6 +71,50 @@ func LogCount(ctx context.Context, expect int) (context.Context, error) { return ctx, nil } +func LogCountLess(ctx context.Context, expect int) (context.Context, error) { + var from int32 + value := ctx.Value(config.StartTimeContextKey) + if value != nil { + from = value.(int32) + } else { + return ctx, fmt.Errorf("no start time") + } + timeoutCtx, cancel := context.WithTimeout(context.TODO(), config.TestConfig.RetryTimeout) + defer cancel() + var groups []*protocol.LogGroup + var err error + var count int + err = retry.Do( + func() error { + count = 0 + groups, err = subscriber.TestSubscriber.GetData(control.GetQuery(ctx), from) + if err != nil { + return err + } + for _, group := range groups { + count += len(group.Logs) + } + if count != expect { + return fmt.Errorf("log count not match, expect %d, got %d, from %d", expect, count, from) + } + if expect == 0 { + return fmt.Errorf("log count is 0") + } + return nil + }, + retry.Context(timeoutCtx), + retry.Delay(5*time.Second), + retry.DelayType(retry.FixedDelay), + ) + if count > 0 && count < expect { + return ctx, nil + } + if err != nil { + return ctx, err + } + return ctx, nil +} + func MetricCheck(ctx context.Context, expect int, duration int64, checker func([]*protocol.LogGroup) error) (context.Context, error) { timeoutCtx, cancel := 
context.WithTimeout(context.TODO(), config.TestConfig.RetryTimeout) defer cancel() diff --git a/test/engine/verify/regex.go b/test/engine/verify/log_order.go similarity index 61% rename from test/engine/verify/regex.go rename to test/engine/verify/log_order.go index 45da3962de..cd2a836681 100644 --- a/test/engine/verify/regex.go +++ b/test/engine/verify/log_order.go @@ -4,18 +4,20 @@ // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. + package verify import ( "context" "fmt" + "strconv" "time" "github.com/avast/retry-go/v4" @@ -26,7 +28,7 @@ import ( "github.com/alibaba/ilogtail/test/engine/setup/subscriber" ) -func RegexSingle(ctx context.Context) (context.Context, error) { +func LogOrder(ctx context.Context) (context.Context, error) { var from int32 value := ctx.Value(config.StartTimeContextKey) if value != nil { @@ -34,33 +36,15 @@ func RegexSingle(ctx context.Context) (context.Context, error) { } else { return ctx, fmt.Errorf("no start time") } - fields := []string{"mark", "file", "logNo", "ip", "time", "method", "url", "http", "status", "size", "userAgent", "msg"} + + // Get logs timeoutCtx, cancel := context.WithTimeout(context.TODO(), config.TestConfig.RetryTimeout) defer cancel() - var groups []*protocol.LogGroup var err error + var groups []*protocol.LogGroup err = retry.Do( func() error { groups, err = subscriber.TestSubscriber.GetData(control.GetQuery(ctx), from) - if err != nil { - return err - } - for _, group := range groups { - for _, log := range group.Logs { - for _, field := range fields { - found := false - for _, content := range log.Contents { - if content.Key == field { - found = true - break - } - } - if !found { - return fmt.Errorf("field %s not found", field) - } - } - } - } return err }, retry.Context(timeoutCtx), @@ -70,5 +54,38 @@ func RegexSingle(ctx context.Context) (context.Context, error) { if err != nil { return ctx, err } + + // Check log order + currentLogNo := 0 + for i := 0; i < len(groups); i++ { + for j := 0; j < len(groups[i].Logs); j++ { + if j == 0 { + currentLogNo, _ = getLogNoFromLog(groups[i].Logs[j]) + continue + } + if groups[i].Logs[j].Time > groups[i].Logs[j-1].Time { + if nextLogNo, ok := getLogNoFromLog(groups[i].Logs[j]); ok { + if nextLogNo != currentLogNo+1 { + return ctx, fmt.Errorf("log order is not correct, current logNo: %d, next logNo: %d", currentLogNo, nextLogNo) + } + currentLogNo = nextLogNo + } + continue + } + } + } return ctx, nil } + +func getLogNoFromLog(log *protocol.Log) (int, bool) { + for _, content := range log.Contents { + if content.Key == "logNo" { + logNo, err := strconv.Atoi(content.Value) + if err != nil { + return 0, false + } + return logNo, true + } + } + return 0, false +} diff --git a/test/go.mod b/test/go.mod index 2aa11b458a..7619611a58 100644 --- a/test/go.mod +++ b/test/go.mod @@ -133,4 +133,4 @@ replace ( github.com/alibaba/ilogtail => ../ github.com/alibaba/ilogtail/pkg => ../pkg github.com/google/cadvisor v0.49.1 => github.com/google/cadvisor v0.49.0 -) +) \ No newline at end of file
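
For context on how the new helpers in test/engine/trigger/generator/helper.go are meant to be consumed, here is a minimal sketch of a generator test built on them. Only getGenerateFileLogConfigFromEnv, string2Template, getRandomLogLevel, and the GenerateFileLogConfig fields come from this diff; the test name TestGenerateExampleLog, the "Separator" custom key, and the template text are illustrative assumptions, not part of the change.

package generator

import (
    "fmt"
    "math/rand"
    "os"
    "testing"
    "time"
)

// TestGenerateExampleLog is a hypothetical generator test following the
// pattern introduced by this diff: read the shared env config, render
// text/template log lines, and append them to the collected file.
func TestGenerateExampleLog(t *testing.T) {
    // TOTAL_LOG, INTERVAL, FILENAME, GENERATED_LOG_DIR and the custom
    // "Separator" key are all read from the environment set by trigger.go.
    config, err := getGenerateFileLogConfigFromEnv("Separator")
    if err != nil {
        t.Fatalf("get generate file log config from env failed: %v", err)
        return
    }
    separator := config.Custom["Separator"]
    if separator == "" {
        separator = " "
    }
    tmpl := string2Template([]string{
        "{{.Time}}{{.Separator}}[{{.Level}}]{{.Separator}}file{{.FileNo}}:{{.LogNo}}{{.Separator}}example message\n",
    })
    file, err := os.OpenFile(fmt.Sprintf("%s/%s", config.GeneratedLogDir, config.FileName),
        os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644)
    if err != nil {
        t.Fatalf("open file failed: %v", err)
        return
    }
    defer file.Close()

    fileNo := rand.Intn(10000)
    logNo := rand.Intn(10000)
    for i := 0; i < config.TotalLog; i++ {
        if err := tmpl[0].Execute(file, map[string]interface{}{
            "Time":      time.Now().Format("2006-01-02 15:04:05.000000000"),
            "Level":     getRandomLogLevel(),
            "FileNo":    fileNo,
            "LogNo":     logNo + i,
            "Separator": separator,
        }); err != nil {
            t.Fatalf("write log failed: %v", err)
            return
        }
        time.Sleep(time.Duration(config.Interval * int(time.Millisecond)))
    }
}

On the engine side, a matching step would call generate(ctx, totalLog, path, interval, "TestGenerateExampleLog", "Separator", ","); the customKV loop in trigger.go serializes the pair as Separator=, in front of the go test invocation produced by getRunTriggerCommand (defined outside this diff), and the generator reads the value back through config.Custom.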
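
The AgentNotCrash verifier added in test/engine/verify/agent.go expects the agent PID to have been stored under config.AgentPIDKey earlier in the scenario. Below is a minimal sketch of such a setup step, assuming it lives in the same verify package; RecordAgentPID is a hypothetical name, while queryPIDCommand, setup.Env.ExecOnLogtail, the "not implemented" escape hatch, and config.AgentPIDKey all come from this diff.

package verify

import (
    "context"

    "github.com/alibaba/ilogtail/test/config"
    "github.com/alibaba/ilogtail/test/engine/setup"
)

// RecordAgentPID captures the loongcollector PID before the test body runs,
// so AgentNotCrash can later re-run the same ps query and compare the result.
func RecordAgentPID(ctx context.Context) (context.Context, error) {
    result, err := setup.Env.ExecOnLogtail(queryPIDCommand)
    if err != nil {
        // Environments that cannot exec on the agent host skip the check,
        // mirroring the behaviour of AgentNotCrash.
        if err.Error() == "not implemented" {
            return ctx, nil
        }
        return ctx, err
    }
    return context.WithValue(ctx, config.AgentPIDKey, result), nil
}

The stored context value is what AgentNotCrash compares against a fresh ps query at the end of the scenario.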