From ca52be6e60f0786a1de6684112e30e6333492694 Mon Sep 17 00:00:00 2001 From: Pranav Gaikwad Date: Wed, 28 Feb 2024 08:55:47 -0500 Subject: [PATCH 1/9] :sparkles: move container utils to a package Signed-off-by: Pranav Gaikwad --- cmd/analyze.go | 63 ++++++++++++++++------------- cmd/openrewrite.go | 15 ++++--- cmd/shimconvert.go | 17 ++++---- {cmd => pkg/container}/container.go | 61 +++++++++++++++++++--------- 4 files changed, 96 insertions(+), 60 deletions(-) rename {cmd => pkg/container}/container.go (72%) diff --git a/cmd/analyze.go b/cmd/analyze.go index f30f675..f3c657c 100644 --- a/cmd/analyze.go +++ b/cmd/analyze.go @@ -18,6 +18,7 @@ import ( "github.com/go-logr/logr" "github.com/konveyor-ecosystem/kantra/cmd/internal/hiddenfile" + "github.com/konveyor-ecosystem/kantra/pkg/container" "github.com/konveyor/analyzer-lsp/engine" outputv1 "github.com/konveyor/analyzer-lsp/output/v1/konveyor" "github.com/konveyor/analyzer-lsp/provider" @@ -301,13 +302,15 @@ func (a *analyzeCommand) ListLabels(ctx context.Context) error { } else { args = append(args, "--list-targets") } - err = NewContainer(a.log).Run( + err = container.NewContainer().Run( ctx, - WithEnv(runMode, runModeContainer), - WithVolumes(volumes), - WithEntrypointBin(fmt.Sprintf("/usr/local/bin/%s", Settings.RootCommandName)), - WithEntrypointArgs(args...), - WithCleanup(a.cleanup), + container.WithImage(Settings.RunnerImage), + container.WithLog(a.log.V(1)), + container.WithEnv(runMode, runModeContainer), + container.WithVolumes(volumes), + container.WithEntrypointBin(fmt.Sprintf("/usr/local/bin/%s", Settings.RootCommandName)), + container.WithEntrypointArgs(args...), + container.WithCleanup(a.cleanup), ) if err != nil { a.log.Error(err, "failed listing labels") @@ -741,14 +744,16 @@ func (a *analyzeCommand) RunAnalysis(ctx context.Context, xmlOutputDir string) e "input", a.input, "output", a.output, "args", strings.Join(args, " "), "volumes", volumes) a.log.Info("generating analysis log in file", "file", analysisLogFilePath) // TODO (pgaikwad): run analysis & deps in parallel - err = NewContainer(a.log).Run( + err = container.NewContainer().Run( ctx, - WithVolumes(volumes), - WithStdout(analysisLog), - WithStderr(analysisLog), - WithEntrypointArgs(args...), - WithEntrypointBin("/usr/bin/entrypoint.sh"), - WithCleanup(a.cleanup), + container.WithImage(Settings.RunnerImage), + container.WithLog(a.log.V(1)), + container.WithVolumes(volumes), + container.WithStdout(analysisLog), + container.WithStderr(analysisLog), + container.WithEntrypointArgs(args...), + container.WithEntrypointBin("/usr/bin/entrypoint.sh"), + container.WithCleanup(a.cleanup), ) if err != nil { return err @@ -838,16 +843,18 @@ func (a *analyzeCommand) GenerateStaticReport(ctx context.Context) error { joinedArgs := strings.Join(args, " ") staticReportCmd := []string{joinedArgs} - container := NewContainer(a.log) + c := container.NewContainer() a.log.Info("generating static report", "output", a.output, "args", strings.Join(staticReportCmd, " ")) - err := container.Run( + err := c.Run( ctx, - WithEntrypointBin("/bin/sh"), - WithEntrypointArgs(staticReportCmd...), - WithVolumes(volumes), - WithcFlag(true), - WithCleanup(a.cleanup), + container.WithImage(Settings.RunnerImage), + container.WithLog(a.log.V(1)), + container.WithEntrypointBin("/bin/sh"), + container.WithEntrypointArgs(staticReportCmd...), + container.WithVolumes(volumes), + container.WithcFlag(true), + container.WithCleanup(a.cleanup), ) if err != nil { return err @@ -1011,14 +1018,16 @@ func (a 
*analyzeCommand) ConvertXML(ctx context.Context) (string, error) { a.log.Info("running windup shim", "output", a.output, "args", strings.Join(args, " "), "volumes", volumes) a.log.Info("generating shim log in file", "file", shimLogPath) - err = NewContainer(a.log).Run( + err = container.NewContainer().Run( ctx, - WithStdout(shimLog), - WithStderr(shimLog), - WithVolumes(volumes), - WithEntrypointArgs(args...), - WithEntrypointBin("/usr/local/bin/windup-shim"), - WithCleanup(a.cleanup), + container.WithImage(Settings.RunnerImage), + container.WithLog(a.log.V(1)), + container.WithStdout(shimLog), + container.WithStderr(shimLog), + container.WithVolumes(volumes), + container.WithEntrypointArgs(args...), + container.WithEntrypointBin("/usr/local/bin/windup-shim"), + container.WithCleanup(a.cleanup), ) if err != nil { return "", err diff --git a/cmd/openrewrite.go b/cmd/openrewrite.go index b9e840d..f0445a9 100644 --- a/cmd/openrewrite.go +++ b/cmd/openrewrite.go @@ -8,6 +8,7 @@ import ( "strings" "github.com/go-logr/logr" + "github.com/konveyor-ecosystem/kantra/pkg/container" "github.com/spf13/cobra" ) @@ -151,13 +152,15 @@ func (o *openRewriteCommand) Run(ctx context.Context) error { args = append(args, "-s", o.mavenSettingsFile) } - err := NewContainer(o.log).Run( + err := container.NewContainer().Run( ctx, - WithEntrypointArgs(args...), - WithEntrypointBin("/usr/bin/openrewrite_entrypoint.sh"), - WithVolumes(volumes), - WithWorkDir("/tmp/source-app/input"), - WithCleanup(o.cleanup), + container.WithImage(Settings.RunnerImage), + container.WithLog(o.log.V(1)), + container.WithEntrypointArgs(args...), + container.WithEntrypointBin("/usr/bin/openrewrite_entrypoint.sh"), + container.WithVolumes(volumes), + container.WithWorkDir("/tmp/source-app/input"), + container.WithCleanup(o.cleanup), ) if err != nil { o.log.V(1).Error(err, "error running openrewrite") diff --git a/cmd/shimconvert.go b/cmd/shimconvert.go index 87734e8..9f10cd8 100644 --- a/cmd/shimconvert.go +++ b/cmd/shimconvert.go @@ -10,6 +10,7 @@ import ( "strings" "github.com/go-logr/logr" + "github.com/konveyor-ecosystem/kantra/pkg/container" "github.com/spf13/cobra" "golang.org/x/exp/maps" ) @@ -162,14 +163,16 @@ func (w *windupShimCommand) Run(ctx context.Context) error { w.log.Info("running windup-shim convert command", "args", strings.Join(args, " "), "volumes", volumes, "output", w.output, "inputs", strings.Join(w.input, ",")) w.log.Info("generating shim log in file", "file", shimLogPath) - err = NewContainer(w.log).Run( + err = container.NewContainer().Run( ctx, - WithVolumes(volumes), - WithStdout(shimLog), - WithStderr(shimLog), - WithEntrypointArgs(args...), - WithEntrypointBin("/usr/local/bin/windup-shim"), - WithCleanup(w.cleanup), + container.WithImage(Settings.RunnerImage), + container.WithLog(w.log.V(1)), + container.WithVolumes(volumes), + container.WithStdout(shimLog), + container.WithStderr(shimLog), + container.WithEntrypointArgs(args...), + container.WithEntrypointBin("/usr/local/bin/windup-shim"), + container.WithCleanup(w.cleanup), ) if err != nil { w.log.V(1).Error(err, "failed to run convert command") diff --git a/cmd/container.go b/pkg/container/container.go similarity index 72% rename from cmd/container.go rename to pkg/container/container.go index 6e8b00e..1dac315 100644 --- a/cmd/container.go +++ b/pkg/container/container.go @@ -1,4 +1,4 @@ -package cmd +package container import ( "bytes" @@ -29,9 +29,11 @@ type container struct { // whether to delete container after run() cleanup bool // map of source -> 
dest paths to mount - volumes map[string]string - cFlag bool - log logr.Logger + volumes map[string]string + cFlag bool + log logr.Logger + containerRuntimeBin string + reproducerCmd *string } type Option func(c *container) @@ -102,6 +104,18 @@ func WithEnv(k string, v string) Option { } } +func WithLog(l logr.Logger) Option { + return func(c *container) { + c.log = l + } +} + +func WithReproduceCmd(r *string) Option { + return func(c *container) { + c.reproducerCmd = r + } +} + func randomName() string { rand.Seed(int64(time.Now().Nanosecond())) charset := "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" @@ -112,19 +126,20 @@ func randomName() string { return string(b) } -func NewContainer(log logr.Logger) *container { +func NewContainer() *container { return &container{ - image: Settings.RunnerImage, - entrypointArgs: []string{}, - volumes: make(map[string]string), - stdout: []io.Writer{os.Stdout}, - env: map[string]string{}, - stderr: []io.Writer{os.Stderr}, - name: randomName(), + image: "", + containerRuntimeBin: "podman", + entrypointArgs: []string{}, + volumes: make(map[string]string), + stdout: []io.Writer{os.Stdout}, + env: map[string]string{}, + stderr: []io.Writer{os.Stderr}, + name: randomName(), // by default, remove the container after run() cleanup: true, cFlag: false, - log: log, + log: logr.Discard(), } } @@ -133,6 +148,9 @@ func (c *container) Run(ctx context.Context, opts ...Option) error { for _, opt := range opts { opt(c) } + if c.image == "" || c.containerRuntimeBin == "" { + return fmt.Errorf("image and containerRuntimeBin must be set") + } args := []string{"run"} os := runtime.GOOS if c.cleanup { @@ -171,7 +189,10 @@ func (c *container) Run(ctx context.Context, opts ...Option) error { if len(c.entrypointArgs) > 0 { args = append(args, c.entrypointArgs...) } - cmd := exec.CommandContext(ctx, Settings.PodmanBinary, args...) + if c.reproducerCmd != nil { + *c.reproducerCmd = fmt.Sprintf("%s %s", c.containerRuntimeBin, strings.Join(args, " ")) + } + cmd := exec.CommandContext(ctx, c.containerRuntimeBin, args...) errBytes := &bytes.Buffer{} cmd.Stdout = nil cmd.Stderr = errBytes @@ -182,11 +203,11 @@ func (c *container) Run(ctx context.Context, opts ...Option) error { cmd.Stderr = io.MultiWriter( append(c.stderr, errBytes)...) 
} - c.log.V(1).Info("executing podman command", - "podman", Settings.PodmanBinary, "cmd", c.entrypointBin, "args", strings.Join(args, " ")) + c.log.Info("executing podman command", + "podman", c.containerRuntimeBin, "cmd", c.entrypointBin, "args", strings.Join(args, " ")) err = cmd.Run() if err != nil { - c.log.V(1).Error(err, "container run error") + c.log.Error(err, "container run error") if _, ok := err.(*exec.ExitError); ok { return fmt.Errorf(errBytes.String()) } @@ -198,9 +219,9 @@ func (c *container) Run(ctx context.Context, opts ...Option) error { func (c *container) Rm(ctx context.Context) error { cmd := exec.CommandContext( ctx, - Settings.PodmanBinary, + c.containerRuntimeBin, "rm", c.name) - c.log.V(1).Info("removing container", - "podman", Settings.PodmanBinary, "name", c.name) + c.log.Info("removing container", + "podman", c.containerRuntimeBin, "name", c.name) return cmd.Run() } From a3e265aefbdff85f4105122f87ea766fcee6f8b0 Mon Sep 17 00:00:00 2001 From: Pranav Gaikwad Date: Wed, 28 Feb 2024 08:57:24 -0500 Subject: [PATCH 2/9] :sparkles: add testing API and schemas Signed-off-by: Pranav Gaikwad --- pkg/testing/schema.go | 77 +++++++++ pkg/testing/schema_test.go | 28 ++++ pkg/testing/test.go | 334 +++++++++++++++++++++++++++++++++++++ pkg/testing/test_test.go | 219 ++++++++++++++++++++++++ test-schema.json | 127 ++++++++++++++ 5 files changed, 785 insertions(+) create mode 100644 pkg/testing/schema.go create mode 100644 pkg/testing/schema_test.go create mode 100644 pkg/testing/test.go create mode 100644 pkg/testing/test_test.go create mode 100755 test-schema.json diff --git a/pkg/testing/schema.go b/pkg/testing/schema.go new file mode 100644 index 0000000..07650d1 --- /dev/null +++ b/pkg/testing/schema.go @@ -0,0 +1,77 @@ +package testing + +import ( + "fmt" + "reflect" + + "github.com/getkin/kin-openapi/openapi3" + "github.com/getkin/kin-openapi/openapi3gen" + "github.com/konveyor/analyzer-lsp/provider" +) + +var t = &TestsFile{} +var cv = &CountBasedVerification{} +var lv = &LocationBasedVerification{} + +func GenerateTestsSchema() (*openapi3.SchemaRef, error) { + schemas := make(openapi3.Schemas) + generator := openapi3gen.NewGenerator( + openapi3gen.SchemaCustomizer(testsSchemaCustomizer)) + return generator.NewSchemaRefForValue(t, schemas) +} + +func testsSchemaCustomizer(name string, t reflect.Type, tag reflect.StructTag, schema *openapi3.Schema) error { + switch name { + case "tests": + if schema.Type == "object" { + schema.Required = append(schema.Required, "ruleID") + } + case "testCases": + if schema.Type == "object" { + schema.Required = append(schema.Required, "name") + } + case "providers": + if schema.Type == "object" { + schema.Required = append(schema.Required, "name") + schema.Required = append(schema.Required, "dataPath") + } else { + schema.Nullable = true + } + case "hasIncidents": + generator := openapi3gen.NewGenerator( + openapi3gen.SchemaCustomizer(testsSchemaCustomizer)) + schemas := make(openapi3.Schemas) + countBasedSchema, err := generator.NewSchemaRefForValue(cv, schemas) + if err != nil { + return err + } + locationBasedSchema, err := generator.NewSchemaRefForValue(lv, schemas) + if err != nil { + return err + } + // handle inline properties correctly + delete(schema.Properties, "CountBased") + delete(schema.Properties, "LocationBased") + merge(schema.Properties, locationBasedSchema.Value.Properties) + merge(schema.Properties, countBasedSchema.Value.Properties) + schema.Nullable = true + case "locations": + if schema.Type == "object" { + 
schema.Required = append(schema.Required, "lineNumber") + schema.Required = append(schema.Required, "fileURI") + } else { + schema.Nullable = true + } + case "atLeast", "atMost", "exactly", "hasTags": + schema.Nullable = true + case "mode": + schema.Pattern = fmt.Sprintf("(%s|%s)", provider.FullAnalysisMode, provider.SourceOnlyAnalysisMode) + } + return nil +} + +func merge(p1, p2 openapi3.Schemas) { + for k, v := range p2 { + p1[k] = v + } +} diff --git a/pkg/testing/schema_test.go b/pkg/testing/schema_test.go new file mode 100644 index 0000000..4905f93 --- /dev/null +++ b/pkg/testing/schema_test.go @@ -0,0 +1,28 @@ +package testing + +import ( + "encoding/json" + "os" + "testing" +) + +func TestGenerateTestsSchema(t *testing.T) { + got, err := GenerateTestsSchema() + if err != nil { + t.Errorf("GenerateTestsSchema() error = %v", err) + return + } + wantContent, err := os.ReadFile("../../test-schema.json") + if err != nil { + t.Errorf("failed reading expected schema file ../schema.json") + return + } + gotContent, err := json.MarshalIndent(got, "", "\t") + if err != nil { + t.Errorf("failed unmarshaling expected schema file ../../test-schema.json") + return + } + if string(gotContent) != string(wantContent) { + t.Errorf("GenerateTestsSchema() want schema \n%v, got \n%v", string(gotContent), string(wantContent)) + } +} diff --git a/pkg/testing/test.go b/pkg/testing/test.go new file mode 100644 index 0000000..ee10987 --- /dev/null +++ b/pkg/testing/test.go @@ -0,0 +1,334 @@ +package testing + +import ( + "fmt" + "path/filepath" + "reflect" + "regexp" + "strings" + + "github.com/konveyor/analyzer-lsp/output/v1/konveyor" + "github.com/konveyor/analyzer-lsp/provider" +) + +const ( + RULESET_TEST_CONFIG_GOLDEN_FILE = "testing-config.yaml" +) + +type TestsFile struct { + // RulesPath is an optional path to respective rules file + RulesPath string `yaml:"rulesPath,omitempty" json:"rulesPath,omitempty"` + // Providers is a list of configs with each item containing config specific to a provider + Providers []ProviderConfig `yaml:"providers,omitempty" json:"providers,omitempty"` + // Tests is a list of tests with each item defining one or more test cases specific to a rule + Tests []Test `yaml:"tests,omitempty" json:"tests,omitempty"` + Path string `yaml:"-" json:"-"` +} + +type ProviderConfig struct { + // Name is the name of the provider this config applies to + Name string `yaml:"name" json:"name"` + // DataPath is a relative path to test data to be used for this provider + DataPath string `yaml:"dataPath" json:"dataPath"` +} + +type Test struct { + // RuleID is the ID of the rule this test applies to + RuleID string `yaml:"ruleID" json:"ruleID"` + // TestCases is a list of distinct test cases for this rule + TestCases []TestCase `yaml:"testCases" json:"testCases"` +} + +type TestCase struct { + // Name is a unique name for this test case + Name string `yaml:"name" json:"name"` + // AnalysisParams is analysis parameters to be used when running this test case + AnalysisParams `yaml:"analysisParams,omitempty" json:"analysisParams,omitempty"` + // IsUnmatched passes test case when the rule is not matched + IsUnmatched bool `yaml:"isUnmatched,omitempty" json:"isUnmatched,omitempty"` + // HasIncidents defines criteria to pass the test case based on incidents for this rule + HasIncidents *IncidentVerification `yaml:"hasIncidents,omitempty" json:"hasIncidents,omitempty"` + // HasTags passes test case when all of the given tags are generated + HasTags []string `yaml:"hasTags,omitempty" 
json:"hasTags,omitempty"` + RuleID string `yaml:"-" json:"-"` +} + +type AnalysisParams struct { + // Mode analysis mode to use when running the test, one of - source-only, full + Mode provider.AnalysisMode `yaml:"mode,omitempty" json:"mode,omitempty"` + // DepLabelSelector dependency label selector to use when running the test + DepLabelSelector string `yaml:"depLabelSelector,omitempty" json:"depLabelSelector,omitempty"` +} + +// IncidentVerification defines criterias to pass a test case. +// Only one of CountBased or LocationBased can be defined at a time. +type IncidentVerification struct { + // CountBased defines a simple test case passing criteria based on count of incidents + CountBased *CountBasedVerification `yaml:",inline,omitempty" json:",inline,omitempty"` + // LocationBased defines a detailed test case passing criteria based on each incident + LocationBased *LocationBasedVerification `yaml:",inline,omitempty" json:",inline,omitempty"` +} + +func (i *IncidentVerification) MarshalYAML() (interface{}, error) { + if i.CountBased != nil { + return i.CountBased, nil + } + return i.LocationBased, nil +} + +// CountBasedVerification defines test case passing criteria based on count of incidents. +// Only one of exactly, atLeast, or atMost can be defined at a time. +type CountBasedVerification struct { + // Exactly pass test case when there are exactly this many incidents + Exactly *int `yaml:"exactly,omitempty" json:"exactly,omitempty"` + // AtLeast pass test case when there are this many or more incidents + AtLeast *int `yaml:"atLeast,omitempty" json:"atLeast,omitempty"` + // AtMost pass test case when there are no more than this many incidents + AtMost *int `yaml:"atMost,omitempty" json:"atMost,omitempty"` + // MessageMatches pass test case when all incidents contain this message + MessageMatches *string `yaml:"messageMatches,omitempty" json:"messageMatches,omitempty"` + // CodeSnipMatches pass test case when all incidents contain this code snip + CodeSnipMatches *string `yaml:"codeSnipMatches,omitempty" json:"codeSnipMatches,omitempty"` +} + +type LocationBasedVerification struct { + // Locations defines detailed conditions for each incident + Locations []LocationVerification `yaml:"locations" json:"locations"` +} + +// LocationVerification defines test case passing criteria based on detailed information in an incident. +// FileURI and LineNumber are required. 
+type LocationVerification struct { + // FileURI is the file in which incident is supposed to be found + FileURI *string `yaml:"fileURI,omitempty" json:"fileURI,omitempty"` + // LineNumber is the line number where incident is supposed to be found + LineNumber *int `yaml:"lineNumber,omitempty" json:"lineNumber,omitempty"` + // MessageMatches is the message that's supposed to be contained within the message of this incident + MessageMatches *string `yaml:"messageMatches,omitempty" json:"messageMatches,omitempty"` + // CodeSnipMatches is the code snippet which is supposed to be present within the codeSnip of this incident + CodeSnipMatches *string `yaml:"codeSnipMatches,omitempty" json:"codeSnipMatches,omitempty"` +} + +func (t TestsFile) Validate() error { + for idx, prov := range t.Providers { + if err := prov.Validate(); err != nil { + return fmt.Errorf("providers[%d] - %s", idx, err.Error()) + } + } + for _, test := range t.Tests { + if err := test.Validate(); err != nil { + return fmt.Errorf("%s#%s", test.RuleID, err.Error()) + } + } + return nil +} + +func (p ProviderConfig) Validate() error { + if p.Name == "" { + return fmt.Errorf("'name' cannot be empty") + } + if p.DataPath == "" { + return fmt.Errorf("'dataPath' cannot be empty") + } + return nil +} + +func (t Test) Validate() error { + if t.RuleID == "" { + return fmt.Errorf("'ruleID' cannot be empty") + } + for _, tc := range t.TestCases { + if err := tc.Validate(); err != nil { + return fmt.Errorf("%s - %s", tc.Name, err.Error()) + } + } + return nil +} + +func (t TestCase) Validate() error { + if t.HasIncidents != nil { + if err := t.HasIncidents.Validate(); err != nil { + return err + } + } + if t.Name == "" { + return fmt.Errorf("'name' cannot be empty") + } + return nil +} + +func (a AnalysisParams) Validate() error { + if a.Mode != "" && a.Mode != provider.FullAnalysisMode && + a.Mode != provider.SourceOnlyAnalysisMode { + return fmt.Errorf("mode must be either %s or %s", + provider.FullAnalysisMode, provider.SourceOnlyAnalysisMode) + } + return nil +} + +func (t IncidentVerification) Validate() error { + if t.CountBased == nil && t.LocationBased == nil { + return fmt.Errorf( + "exactly one of the following properties of hasIncidents must be defined - 'exactly', 'atLeast', 'atMost' or 'locations'") + } + if t.CountBased != nil && t.LocationBased != nil { + return fmt.Errorf( + "properties 'exactly', 'atLeast', 'atMost' and 'locations' are mutually exclusive") + } + if t.LocationBased != nil { + if t.LocationBased.Locations == nil { + return fmt.Errorf( + "at least one location must be defined under 'hasIncidents.locations'") + } else { + for idx, loc := range t.LocationBased.Locations { + err := loc.Validate() + if err != nil { + return fmt.Errorf("locations[%d] - %s", idx, err.Error()) + } + } + } + + } + if t.CountBased != nil { + total := 0 + if t.CountBased.AtLeast != nil { + total += 1 + } + if t.CountBased.AtMost != nil { + total += 1 + } + if t.CountBased.Exactly != nil { + total += 1 + } + if total > 1 { + return fmt.Errorf("properties 'exactly', 'atMost', 'atLeast' are mutually exclusive") + } + } + return nil +} + +func (l LocationVerification) Validate() error { + if l.FileURI == nil { + return fmt.Errorf("'hasIncidents.fileURI' must be defined") + } + if l.LineNumber == nil { + return fmt.Errorf("'lineNumber' must be defined") + } + return nil +} + +func (t TestCase) Verify(output konveyor.RuleSet) []string { + failures := []string{} + violation, violationExists := output.Violations[t.RuleID] + existsInUnmatched 
:= false + for _, unmatchd := range output.Unmatched { + if unmatchd == t.RuleID { + existsInUnmatched = true + } + } + if t.IsUnmatched && (violationExists || !existsInUnmatched) { + failures = append(failures, "expected rule to not match but matched") + return failures + } + if !t.IsUnmatched && existsInUnmatched { + failures = append(failures, "expected rule to match but unmatched") + return failures + } + for _, expectedTag := range t.HasTags { + found := false + for _, foundTag := range output.Tags { + if foundTag == expectedTag { + found = true + break + } + if r, err := regexp.Compile(expectedTag); err == nil && r.MatchString(foundTag) { + found = true + break + } + } + if !found { + failures = append(failures, fmt.Sprintf("expected tag %s not found", expectedTag)) + } + } + + compareMessageOrCodeSnip := func(with string, pattern string) bool { + if r, err := regexp.Compile(pattern); err == nil && + !r.MatchString(with) { + return false + } + if !strings.Contains(with, pattern) { + return false + } + return true + } + + if t.HasIncidents != nil { + countBased := t.HasIncidents.CountBased + locationBased := t.HasIncidents.LocationBased + + if locationBased != nil { + for _, loc := range t.HasIncidents.LocationBased.Locations { + foundIncidentsInFile := []konveyor.Incident{} + for idx := range violation.Incidents { + incident := &violation.Incidents[idx] + if strings.HasSuffix(string(incident.URI), filepath.Clean(*loc.FileURI)) { + foundIncidentsInFile = append(foundIncidentsInFile, *incident) + } + } + if len(foundIncidentsInFile) == 0 { + failures = append(failures, fmt.Sprintf("expected incident in file %s not found", filepath.Clean(*loc.FileURI))) + continue + } + foundIncident := konveyor.Incident{} + lineNumberFound := false + for _, inc := range foundIncidentsInFile { + if reflect.DeepEqual(inc.LineNumber, loc.LineNumber) { + lineNumberFound = true + foundIncident = inc + break + } + } + if !lineNumberFound { + failures = append(failures, + fmt.Sprintf("expected incident in %s on line number %d not found", + *loc.FileURI, *loc.LineNumber)) + continue + } + if loc.CodeSnipMatches != nil { + if !compareMessageOrCodeSnip(foundIncident.CodeSnip, *loc.CodeSnipMatches) { + failures = append(failures, fmt.Sprintf( + "expected code snip to match pattern `%s`, got `%s`", + *loc.CodeSnipMatches, foundIncident.CodeSnip)) + continue + } + } + if loc.MessageMatches != nil { + if !compareMessageOrCodeSnip(foundIncident.Message, *loc.MessageMatches) { + failures = append(failures, fmt.Sprintf( + "expected code snip to match pattern `%s`, got `%s`", + *loc.MessageMatches, foundIncident.Message)) + continue + } + } + } + } + if countBased != nil { + if countBased.Exactly != nil && *countBased.Exactly != len(violation.Incidents) { + return append(failures, + fmt.Sprintf("expected exactly %d incidents, got %d", + *countBased.Exactly, len(violation.Incidents))) + } + if countBased.AtLeast != nil && *countBased.AtLeast > len(violation.Incidents) { + return append(failures, + fmt.Sprintf("expected at least %d incidents, got %d", + *countBased.AtLeast, len(violation.Incidents))) + } + if countBased.AtMost != nil && *countBased.AtMost < len(violation.Incidents) { + return append(failures, + fmt.Sprintf("expected at most %d incidents, got %d", + *countBased.AtMost, len(violation.Incidents))) + } + } + } + return failures +} diff --git a/pkg/testing/test_test.go b/pkg/testing/test_test.go new file mode 100644 index 0000000..6226bff --- /dev/null +++ b/pkg/testing/test_test.go @@ -0,0 +1,219 @@ +package 
testing + +import ( + "testing" + + "github.com/konveyor/analyzer-lsp/output/v1/konveyor" + "go.lsp.dev/uri" +) + +func TestTestCase_Verify(t *testing.T) { + two := int(2) + three := int(3) + testFileUri := "test" + mountedUri := "file:///data/test/sample.xml" + localUri := "./test/sample.xml" + tests := []struct { + name string + testCase TestCase + output konveyor.RuleSet + wantErrors int + }{ + { + name: "tc checks if a rule is not matched", + testCase: TestCase{ + RuleID: "rule", + IsUnmatched: true, + }, + output: konveyor.RuleSet{Unmatched: []string{"rule"}}, + wantErrors: 0, + }, + { + name: "tc checks if a tag is present", + testCase: TestCase{ + HasTags: []string{"Python"}, + }, + output: konveyor.RuleSet{Tags: []string{"Python"}}, + wantErrors: 0, + }, + { + name: "tc checks if a tag is present - negative", + testCase: TestCase{ + HasTags: []string{"Python"}, + }, + output: konveyor.RuleSet{Tags: []string{}}, + wantErrors: 1, + }, + { + name: "tc uses exactly constraint", + testCase: TestCase{ + RuleID: "rule", + HasIncidents: &IncidentVerification{ + CountBased: &CountBasedVerification{ + Exactly: &one, + }, + }, + }, + output: konveyor.RuleSet{Violations: map[string]konveyor.Violation{ + "rule": { + Incidents: []konveyor.Incident{ + {URI: "test", Message: "test", CodeSnip: "test"}, + }, + }, + }}, + wantErrors: 0, + }, + { + name: "tc uses exactly constraint - negative", + testCase: TestCase{ + RuleID: "rule", + HasIncidents: &IncidentVerification{ + CountBased: &CountBasedVerification{ + Exactly: &one, + }, + }, + }, + output: konveyor.RuleSet{Violations: map[string]konveyor.Violation{ + "rule": { + Incidents: []konveyor.Incident{ + {URI: "test", Message: "test", CodeSnip: "test"}, + {URI: "test", Message: "test", CodeSnip: "test"}, + }, + }, + }}, + wantErrors: 1, + }, + { + name: "tc uses atLeast constraint", + testCase: TestCase{ + RuleID: "rule", + HasIncidents: &IncidentVerification{ + CountBased: &CountBasedVerification{ + AtLeast: &one, + }, + }, + }, + output: konveyor.RuleSet{Violations: map[string]konveyor.Violation{ + "rule": { + Incidents: []konveyor.Incident{ + {URI: "test", Message: "test", CodeSnip: "test"}, + {URI: "test", Message: "test", CodeSnip: "test"}, + }, + }, + }}, + wantErrors: 0, + }, + { + name: "tc uses atLeast constraint - negative", + testCase: TestCase{ + RuleID: "rule", + HasIncidents: &IncidentVerification{ + CountBased: &CountBasedVerification{ + AtLeast: &two, + }, + }, + }, + output: konveyor.RuleSet{Violations: map[string]konveyor.Violation{ + "rule": { + Incidents: []konveyor.Incident{ + {URI: "test", Message: "test", CodeSnip: "test"}, + }, + }, + }}, + wantErrors: 1, + }, + { + name: "tc uses atMost constraint", + testCase: TestCase{ + RuleID: "rule", + HasIncidents: &IncidentVerification{ + CountBased: &CountBasedVerification{ + AtMost: &one, + }, + }, + }, + output: konveyor.RuleSet{Violations: map[string]konveyor.Violation{ + "rule": { + Incidents: []konveyor.Incident{ + {URI: "test", Message: "test", CodeSnip: "test"}, + }, + }, + }}, + wantErrors: 0, + }, + { + name: "tc uses atMost constraint - negative", + testCase: TestCase{ + RuleID: "rule", + HasIncidents: &IncidentVerification{ + CountBased: &CountBasedVerification{ + AtMost: &two, + }, + }, + }, + output: konveyor.RuleSet{Violations: map[string]konveyor.Violation{ + "rule": { + Incidents: []konveyor.Incident{ + {URI: "test", Message: "test", CodeSnip: "test"}, + {URI: "test", Message: "test", CodeSnip: "test"}, + {URI: "test", Message: "test", CodeSnip: "test"}, + }, + }, + 
}}, + wantErrors: 1, + }, + { + name: "tc uses locationBased constraint", + testCase: TestCase{ + RuleID: "rule", + HasIncidents: &IncidentVerification{ + LocationBased: &LocationBasedVerification{ + Locations: []LocationVerification{ + {FileURI: &testFileUri, LineNumber: &one}, + {FileURI: &testFileUri, LineNumber: &three, MessageMatches: &testFileUri, CodeSnipMatches: &testFileUri}, + }, + }, + }, + }, + output: konveyor.RuleSet{Violations: map[string]konveyor.Violation{ + "rule": { + Incidents: []konveyor.Incident{ + {URI: "test", LineNumber: &one}, + {URI: "test", LineNumber: &three, Message: "test", CodeSnip: "test"}, + }, + }, + }}, + wantErrors: 0, + }, + { + name: "tc uses locationBased constraint and has a different file URI coming from the container", + testCase: TestCase{ + RuleID: "rule", + HasIncidents: &IncidentVerification{ + LocationBased: &LocationBasedVerification{ + Locations: []LocationVerification{ + {FileURI: &localUri, LineNumber: &one}, + }, + }, + }, + }, + output: konveyor.RuleSet{Violations: map[string]konveyor.Violation{ + "rule": { + Incidents: []konveyor.Incident{ + {URI: uri.URI(mountedUri), LineNumber: &one}, + }, + }, + }}, + wantErrors: 0, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tr := tt.testCase + if got := tr.Verify(tt.output); len(got) != tt.wantErrors { + t.Errorf("TestCase.Verify() = got verification error %v, want %v, errors %v", + len(got), tt.wantErrors, got) + } + }) + } +} diff --git a/test-schema.json b/test-schema.json new file mode 100755 index 0000000..602170d --- /dev/null +++ b/test-schema.json @@ -0,0 +1,127 @@ +{ + "properties": { + "providers": { + "items": { + "properties": { + "dataPath": { + "type": "string" + }, + "name": { + "type": "string" + } + }, + "required": [ + "name", + "dataPath" + ], + "type": "object" + }, + "nullable": true, + "type": "array" + }, + "rulesPath": { + "type": "string" + }, + "tests": { + "items": { + "properties": { + "ruleID": { + "type": "string" + }, + "testCases": { + "items": { + "properties": { + "analysisParams": { + "properties": { + "depLabelSelector": { + "type": "string" + }, + "mode": { + "pattern": "(full|source-only)", + "type": "string" + } + }, + "type": "object" + }, + "hasIncidents": { + "nullable": true, + "properties": { + "atLeast": { + "nullable": true, + "type": "integer" + }, + "atMost": { + "nullable": true, + "type": "integer" + }, + "codeSnipMatches": { + "type": "string" + }, + "exactly": { + "nullable": true, + "type": "integer" + }, + "locations": { + "items": { + "properties": { + "codeSnipMatches": { + "type": "string" + }, + "fileURI": { + "type": "string" + }, + "lineNumber": { + "type": "integer" + }, + "messageMatches": { + "type": "string" + } + }, + "required": [ + "lineNumber", + "fileURI" + ], + "type": "object" + }, + "nullable": true, + "type": "array" + }, + "messageMatches": { + "type": "string" + } + }, + "type": "object" + }, + "hasTags": { + "items": { + "nullable": true, + "type": "string" + }, + "nullable": true, + "type": "array" + }, + "isUnmatched": { + "type": "boolean" + }, + "name": { + "type": "string" + } + }, + "required": [ + "name" + ], + "type": "object" + }, + "type": "array" + } + }, + "required": [ + "ruleID" + ], + "type": "object" + }, + "type": "array" + } + }, + "type": "object" +} \ No newline at end of file From 0940a14f32a27f42aee3f16f5d3d52c4c65f03ab Mon Sep 17 00:00:00 2001 From: Pranav Gaikwad Date: Wed, 28 Feb 2024 08:58:39 -0500 Subject: [PATCH 3/9] :sparkles: add test parser, runner and result 
printer Signed-off-by: Pranav Gaikwad --- pkg/testing/parse.go | 190 +++++++++++++ pkg/testing/parse_test.go | 437 ++++++++++++++++++++++++++++++ pkg/testing/result.go | 160 +++++++++++ pkg/testing/runner.go | 539 +++++++++++++++++++++++++++++++++++++ pkg/testing/runner_test.go | 44 +++ 5 files changed, 1370 insertions(+) create mode 100644 pkg/testing/parse.go create mode 100644 pkg/testing/parse_test.go create mode 100644 pkg/testing/result.go create mode 100644 pkg/testing/runner.go create mode 100644 pkg/testing/runner_test.go diff --git a/pkg/testing/parse.go b/pkg/testing/parse.go new file mode 100644 index 0000000..521e603 --- /dev/null +++ b/pkg/testing/parse.go @@ -0,0 +1,190 @@ +package testing + +import ( + "fmt" + "io/fs" + "os" + "path/filepath" + "sort" + "strings" + + "gopkg.in/yaml.v3" +) + +// TestsFilter filters in-out tests/test cases and returns final list of things to run +type TestsFilter interface { + Filter([]Test) []Test +} + +func Parse(paths []string, filter TestsFilter) ([]TestsFile, error) { + tests := []TestsFile{} + for _, path := range paths { + err := filepath.Walk(path, func(path string, info fs.FileInfo, e error) error { + if e != nil { + return e + } + if info.IsDir() { + return nil + } + if !strings.HasSuffix(info.Name(), ".test.yaml") && + !strings.HasSuffix(info.Name(), ".test.yml") { + return nil + } + // attempt tp parse ruleset level provider config + providerConfig := parseRulesetConfig( + filepath.Join(filepath.Dir(path), RULESET_TEST_CONFIG_GOLDEN_FILE)) + // parse the tests file + t, err := parseFile(path, filter) + if err != nil { + return fmt.Errorf("failed to load tests from path %s (%w)", path, err) + } + t.Path = path + if val, err := filepath.Abs(t.Path); err == nil { + t.Path = val + } + if t.RulesPath == "" { + t.RulesPath = strings.Replace(path, ".test.yaml", ".yaml", -1) + t.RulesPath = strings.Replace(t.RulesPath, ".test.yml", ".yml", -1) + if val, err := filepath.Abs(t.RulesPath); err == nil { + t.RulesPath = val + } + } else { + t.RulesPath = filepath.Join(filepath.Dir(path), t.RulesPath) + } + // merge ruleset level config with test specific config + t.Providers = mergeProviderConfig(providerConfig, t.Providers) + // validate + err = t.Validate() + if err != nil { + return fmt.Errorf("invalid tests file %s (%w)", path, err) + } + // apply filters + if filter != nil { + t.Tests = filter.Filter(t.Tests) + } + if len(t.Tests) == 0 { + // everything filtered out + return nil + } + tests = append(tests, t) + return nil + }) + if err != nil { + return nil, err + } + } + return tests, nil +} + +func parseRulesetConfig(path string) []ProviderConfig { + providerConfig := struct { + Providers []ProviderConfig `yaml:"providers" json:"providers"` + }{ + Providers: []ProviderConfig{}, + } + content, err := os.ReadFile(path) + if err != nil { + return providerConfig.Providers + } + err = yaml.Unmarshal(content, &providerConfig) + if err != nil { + return providerConfig.Providers + } + return providerConfig.Providers +} + +func parseFile(path string, f TestsFilter) (TestsFile, error) { + t := TestsFile{} + content, err := os.ReadFile(path) + if err != nil { + return t, err + } + err = yaml.Unmarshal(content, &t) + if err != nil { + return t, err + } + for idx := range t.Tests { + test := &t.Tests[idx] + for jdx := range test.TestCases { + tc := &test.TestCases[jdx] + tc.RuleID = test.RuleID + } + } + return t, nil +} + +// mergeProviderConfig merge values in p2 into p1, p2 takes precedance +func mergeProviderConfig(p1, p2 []ProviderConfig) 
[]ProviderConfig { + merged := []ProviderConfig{} + seen := map[string]*ProviderConfig{} + for idx, conf := range p1 { + seen[conf.Name] = &p1[idx] + } + for idx, conf := range p2 { + if _, ok := seen[conf.Name]; ok { + seen[conf.Name].DataPath = conf.DataPath + } else { + seen[conf.Name] = &p2[idx] + } + } + for _, v := range seen { + merged = append(merged, *v) + } + // sorting for stability of unit tests + sort.Slice(merged, func(i, j int) bool { + return strings.Compare(merged[i].Name, merged[j].Name) < 0 + }) + return merged +} + +// NewInlineNameBasedFilter works on an input string containing a comma +// separated list of test names and test case names to include +func NewInlineNameBasedFilter(names string) TestsFilter { + if names == "" { + return &inlineNameBasedFilter{} + } + includedNames := map[string]interface{}{} + for _, val := range strings.Split(names, ",") { + if val != "" { + includedNames[val] = nil + } + } + return &inlineNameBasedFilter{ + includedNames: includedNames, + } +} + +type inlineNameBasedFilter struct { + includedNames map[string]interface{} +} + +func (i inlineNameBasedFilter) Filter(tests []Test) []Test { + if i.includedNames == nil { + return tests + } + filterTCs := func(tcs []TestCase) []TestCase { + filteredTCs := []TestCase{} + for _, tc := range tcs { + tcName := fmt.Sprintf("%s#%s", tc.RuleID, tc.Name) + if _, tcOk := i.includedNames[tcName]; tcOk { + filteredTCs = append(filteredTCs, tc) + } + } + return filteredTCs + } + filtered := []Test{} + for _, test := range tests { + if _, ok := i.includedNames[test.RuleID]; ok { + // entire test is included + filtered = append(filtered, test) + } else { + // one or more test cases in a test are included + filteredTest := test + filteredTest.TestCases = filterTCs(test.TestCases) + if len(filteredTest.TestCases) > 0 { + filtered = append(filtered, filteredTest) + } + } + } + return filtered +} diff --git a/pkg/testing/parse_test.go b/pkg/testing/parse_test.go new file mode 100644 index 0000000..e1e0c3b --- /dev/null +++ b/pkg/testing/parse_test.go @@ -0,0 +1,437 @@ +package testing + +import ( + "reflect" + "strings" + "testing" +) + +var one = int(1) +var seven = int(7) +var codeSnipOne = "file://common.properties" +var fileTwo = "./test-data/java/src/main/resources/persistence.properties" +var discoveryTests = TestsFile{ + Providers: []ProviderConfig{ + {Name: "builtin", DataPath: "./test-data/python/"}, + {Name: "java", DataPath: "./test-data/java/"}, + {Name: "python", DataPath: "./test-data/python/"}, + }, + RulesPath: "examples/ruleset/discovery.yaml", + Tests: []Test{ + { + RuleID: "language-discovery", + TestCases: []TestCase{{ + Name: "tc-00", + RuleID: "language-discovery", + HasTags: []string{"Python"}, + }}, + }, + { + RuleID: "kube-api-usage", + TestCases: []TestCase{ + { + Name: "tc-00", + RuleID: "language-discovery", + HasTags: []string{"Kubernetes"}, + }, + { + Name: "tc-01", + RuleID: "language-discovery", + HasIncidents: &IncidentVerification{ + CountBased: &CountBasedVerification{ + Exactly: &one, + }, + }, + }, + }, + }, + }, +} +var localStorageTests = TestsFile{ + Providers: []ProviderConfig{ + {Name: "builtin", DataPath: "./test-data/"}, + {Name: "java", DataPath: "./test-data/java/"}, + {Name: "python", DataPath: "./test-data/python/"}, + }, + RulesPath: "examples/ruleset/local-storage.yml", + Tests: []Test{ + { + RuleID: "storage-000", + TestCases: []TestCase{{ + Name: "tc-00", + RuleID: "storage-00", + HasIncidents: &IncidentVerification{ + LocationBased: 
&LocationBasedVerification{ + Locations: []LocationVerification{ + { + FileURI: &fileTwo, + LineNumber: &seven, + MessageMatches: &codeSnipOne, + CodeSnipMatches: &codeSnipOne, + }, + }, + }, + }, + }}, + }, + }, +} + +func TestParse(t *testing.T) { + tests := []struct { + name string + inputPaths []string + inputFilter TestsFilter + want []TestsFile + wantErr bool + }{ + { + name: "pass ruleset as input", + inputPaths: []string{ + "./examples/ruleset/", + }, + want: []TestsFile{ + discoveryTests, + localStorageTests, + }, + }, + { + name: "pass multiple test files as input", + inputPaths: []string{ + "./examples/ruleset/local-storage.test.yml", + "./examples/ruleset/discovery.test.yaml", + }, + want: []TestsFile{ + localStorageTests, + discoveryTests, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := Parse(tt.inputPaths, tt.inputFilter) + if (err != nil) != tt.wantErr { + t.Errorf("Parse() error = %v, wantErr %v", err, tt.wantErr) + return + } + if len(got) != len(tt.want) { + t.Errorf("Parse() expected %d tests files, got %d", len(tt.want), len(got)) + return + } + for idx, gotTests := range got { + wantTests := tt.want[idx] + if !reflect.DeepEqual(gotTests.Providers, wantTests.Providers) { + t.Errorf("Parse() Tests[%d] expected provider config %v, got %v", idx, wantTests.Providers, gotTests.Providers) + return + } + if len(gotTests.Tests) != len(wantTests.Tests) { + t.Errorf("Parse() Tests[%d] expected total tests %d, got %d", idx, len(wantTests.Tests), len(gotTests.Tests)) + return + } + if !strings.Contains(gotTests.RulesPath, wantTests.RulesPath) { + t.Errorf("Parse() Tests[%d] expected RulesPath %s, got %s", idx, wantTests.RulesPath, gotTests.RulesPath) + return + } + for jdx, gotTest := range gotTests.Tests { + wantTest := wantTests.Tests[jdx] + if len(gotTest.TestCases) != len(wantTest.TestCases) { + t.Errorf("Parse() Tests[%d].Tests[%d] expected test cases %d, got %d", idx, jdx, len(wantTest.TestCases), len(gotTest.TestCases)) + return + } + + if wantTest.RuleID != gotTest.RuleID { + t.Errorf("Parse() Tests[%d].Tests[%d] expected ruleID %s, got %s", idx, jdx, wantTest.RuleID, gotTest.RuleID) + return + } + for kdx, gotTc := range gotTest.TestCases { + wantTc := wantTest.TestCases[kdx] + if !reflect.DeepEqual(wantTc.AnalysisParams, gotTc.AnalysisParams) { + t.Errorf("Parse() Tests[%d].Tests[%d].TestCases[%d] expected params %v, got %v", idx, jdx, kdx, wantTc.AnalysisParams, gotTc.AnalysisParams) + return + } + if wantTc.Name != gotTc.Name { + t.Errorf("Parse() Tests[%d].Tests[%d].TestCases[%d] expected name %s, got %s", idx, jdx, kdx, wantTc.Name, gotTc.Name) + return + } + if gotTc.HasIncidents != nil { + if wantTc.HasIncidents == nil { + t.Errorf("Parse() Tests[%d].Tests[%d].TestCases[%d] expected hasIncidents , got %v", idx, jdx, kdx, gotTc.HasIncidents) + return + } + if gotTc.HasIncidents.CountBased != nil { + if wantTc.HasIncidents.CountBased == nil { + t.Errorf("Parse() Tests[%d].Tests[%d].TestCases[%d] expected hasIncidents , got %v", idx, jdx, kdx, gotTc.HasIncidents.CountBased) + return + } + if !reflect.DeepEqual(gotTc.HasIncidents.CountBased, wantTc.HasIncidents.CountBased) { + t.Errorf("Parse() Tests[%d].Tests[%d].TestCases[%d] expected hasIncidents %v, got %v", idx, jdx, kdx, wantTc.HasIncidents.CountBased, gotTc.HasIncidents.CountBased) + return + } + } + if gotTc.HasIncidents.LocationBased != nil { + if wantTc.HasIncidents.LocationBased == nil { + t.Errorf("Parse() Tests[%d].Tests[%d].TestCases[%d] expected 
hasIncidents.locations , got %v", idx, jdx, kdx, gotTc.HasIncidents.LocationBased) + return + } + for ldx, gotLocation := range gotTc.HasIncidents.LocationBased.Locations { + wantLocation := wantTc.HasIncidents.LocationBased.Locations[ldx] + if !reflect.DeepEqual(gotLocation, wantLocation) { + t.Errorf("Parse() Tests[%d].Tests[%d].TestCases[%d] expected hasIncidents.locations[%d] %v, got %v", idx, jdx, kdx, ldx, wantLocation, gotLocation) + return + } + } + } + } + if !reflect.DeepEqual(gotTc.HasTags, wantTc.HasTags) { + t.Errorf("Parse() Tests[%d].Tests[%d].TestCases[%d] expected %v hasTags, got %v", idx, jdx, kdx, wantTc.HasTags, gotTc.HasTags) + return + } + } + } + } + }) + } +} + +func Test_mergeProviderConfig(t *testing.T) { + tests := []struct { + name string + mergeInto []ProviderConfig + mergeFrom []ProviderConfig + want []ProviderConfig + }{ + { + name: "mergeFrom must take precedance when conflicting values", + mergeInto: []ProviderConfig{ + { + DataPath: "./test/", + Name: "go", + }, + }, + mergeFrom: []ProviderConfig{ + { + DataPath: "./test/go/", + Name: "go", + }, + }, + want: []ProviderConfig{ + { + DataPath: "./test/go/", + Name: "go", + }, + }, + }, + { + name: "mergeInto has more items than mergeFrom, they should be kept as-is", + mergeInto: []ProviderConfig{ + { + DataPath: "./test/", + Name: "go", + }, + { + DataPath: "./test/", + Name: "builtin", + }, + }, + mergeFrom: []ProviderConfig{ + { + DataPath: "./test/go/", + Name: "go", + }, + }, + want: []ProviderConfig{ + { + DataPath: "./test/", + Name: "builtin", + }, + { + DataPath: "./test/go/", + Name: "go", + }, + }, + }, + { + name: "mergeFrom has more items than mergeInto, they should be kept as-is", + mergeInto: []ProviderConfig{ + { + DataPath: "./test/", + Name: "go", + }, + }, + mergeFrom: []ProviderConfig{ + { + DataPath: "./test/go/", + Name: "go", + }, + { + DataPath: "./test/", + Name: "builtin", + }, + }, + want: []ProviderConfig{ + { + DataPath: "./test/", + Name: "builtin", + }, + { + DataPath: "./test/go/", + Name: "go", + }, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := mergeProviderConfig(tt.mergeInto, tt.mergeFrom); !reflect.DeepEqual(got, tt.want) { + t.Errorf("mergeProviderConfig() = %v, want %v", got, tt.want) + } + }) + } +} + +func NewTest(modifier ...func(t *Test)) Test { + t := Test{} + for _, fn := range modifier { + fn(&t) + } + for idx := range t.TestCases { + tc := &t.TestCases[idx] + tc.RuleID = t.RuleID + } + return t +} + +func WithTC(name string, modifiers ...func(tc *TestCase)) func(tc *Test) { + testCase := TestCase{Name: name} + for _, mod := range modifiers { + mod(&testCase) + } + return func(tc *Test) { + tc.TestCases = append(tc.TestCases, testCase) + } +} + +func WithRuleID(ruleID string) func(t *Test) { + return func(tc *Test) { + tc.RuleID = ruleID + } +} + +func WithAnalysisParams(a AnalysisParams) func(tc *TestCase) { + return func(tc *TestCase) { + tc.AnalysisParams = a + } +} + +func Test_inlineNameBasedFilter_Filter(t *testing.T) { + tests := []struct { + name string + filterString string + inputTests []Test + wantTests []Test + }{ + { + name: "filter string is empty, include everything", + filterString: "", + inputTests: []Test{ + NewTest( + WithRuleID("rule-000"), + WithTC("tc-00"), + WithTC("tc-01"), + ), + }, + wantTests: []Test{ + NewTest( + WithRuleID("rule-000"), + WithTC("tc-00"), + WithTC("tc-01"), + ), + }, + }, + { + name: "filter string specifies only a tc, include that test with only that tc", + filterString: 
"rule-000#tc-01", + inputTests: []Test{ + NewTest( + WithRuleID("rule-000"), + WithTC("tc-00"), + WithTC("tc-01"), + ), + NewTest( + WithRuleID("rule-001"), + WithTC("tc-00"), + WithTC("tc-01"), + ), + }, + wantTests: []Test{ + NewTest( + WithRuleID("rule-000"), + WithTC("tc-01"), + ), + }, + }, + { + name: "filter string has a test and a test case from that same test, include entire test", + filterString: "rule-000,rule-000#tc-01", + inputTests: []Test{ + NewTest( + WithRuleID("rule-000"), + WithTC("tc-00"), + WithTC("tc-01"), + ), + NewTest( + WithRuleID("rule-001"), + WithTC("tc-00"), + WithTC("tc-01"), + ), + }, + wantTests: []Test{ + NewTest( + WithRuleID("rule-000"), + WithTC("tc-00"), + WithTC("tc-01"), + ), + }, + }, + { + name: "filter string has a test and a test case from another test", + filterString: "rule-000,rule-002#tc-00", + inputTests: []Test{ + NewTest( + WithRuleID("rule-000"), + WithTC("tc-00"), + WithTC("tc-01"), + ), + NewTest( + WithRuleID("rule-001"), + WithTC("tc-00"), + WithTC("tc-01"), + ), + NewTest( + WithRuleID("rule-002"), + WithTC("tc-00"), + WithTC("tc-01"), + ), + }, + wantTests: []Test{ + NewTest( + WithRuleID("rule-000"), + WithTC("tc-00"), + WithTC("tc-01"), + ), + NewTest( + WithRuleID("rule-002"), + WithTC("tc-00"), + ), + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := NewInlineNameBasedFilter(tt.filterString).Filter(tt.inputTests) + if !reflect.DeepEqual(tt.wantTests, got) { + t.Errorf("inlineNameBasedFilter.IncludeTest() = %v, want %v", got, tt.wantTests) + } + }) + } +} diff --git a/pkg/testing/result.go b/pkg/testing/result.go new file mode 100644 index 0000000..4a2424b --- /dev/null +++ b/pkg/testing/result.go @@ -0,0 +1,160 @@ +package testing + +import ( + "fmt" + "io" + "math" + "path/filepath" + "strings" +) + +// Result is a result of a test run +type Result struct { + Passed bool + TestsFilePath string + RuleID string + TestCaseName string + DebugInfo []string + FailureReasons []string + Error error +} + +// ResultPrinter is a function to print given results to a given place +type ResultPrinter func(io.WriteCloser, []Result) + +type summary struct { + total int + passed int +} + +// PrintSummary prints statistical summary from given results +func PrintSummary(w io.WriteCloser, results []Result) { + summaryByRules := map[string]*summary{} + tcSummary := summary{} + rulesSummary := summary{} + for _, result := range results { + if _, found := summaryByRules[result.RuleID]; !found { + summaryByRules[result.RuleID] = &summary{} + } + summaryByRules[result.RuleID].total += 1 + tcSummary.total += 1 + if len(result.FailureReasons) == 0 { + summaryByRules[result.RuleID].passed += 1 + tcSummary.passed += 1 + } + } + for _, summary := range summaryByRules { + rulesSummary.total += 1 + if summary.passed == summary.total { + rulesSummary.passed += 1 + } + } + fmt.Fprintln(w, strings.Repeat("-", 60)) + fmt.Fprintf(w, " Rules Summary: %d/%d (%.2f%%) PASSED\n", + rulesSummary.passed, rulesSummary.total, float64(rulesSummary.passed)/float64(rulesSummary.total)*100) + fmt.Fprintf(w, " Test Cases Summary: %d/%d (%.2f%%) PASSED\n", + tcSummary.passed, tcSummary.total, float64(tcSummary.passed)/float64(tcSummary.total)*100) + fmt.Fprintln(w, strings.Repeat("-", 60)) +} + +// PrintProgress prints detailed information from given results +func PrintProgress(w io.WriteCloser, results []Result) { + // results grouped by their tests files, then rules, then test cases + resultsByTestsFile := map[string]map[string][]Result{} + 
errorsByTestsFile := map[string][]string{} + justifyLen := 0 + maxInt := func(a ...int) int { + maxInt := math.MinInt64 + for _, n := range a { + maxInt = int(math.Max(float64(n), float64(maxInt))) + } + return maxInt + } + for _, result := range results { + if result.Error != nil { + if _, ok := errorsByTestsFile[result.TestsFilePath]; !ok { + errorsByTestsFile[result.TestsFilePath] = []string{} + } + errorsByTestsFile[result.TestsFilePath] = append(errorsByTestsFile[result.TestsFilePath], + result.Error.Error()) + continue + } + justifyLen = maxInt(justifyLen, + len(filepath.Base(result.TestsFilePath))+3, + len(result.RuleID)+4, len(result.TestCaseName)+6) + if _, ok := resultsByTestsFile[result.TestsFilePath]; !ok { + resultsByTestsFile[result.TestsFilePath] = map[string][]Result{} + } + if result.RuleID != "" { + if _, ok := resultsByTestsFile[result.TestsFilePath][result.RuleID]; !ok { + resultsByTestsFile[result.TestsFilePath][result.RuleID] = []Result{} + } + if result.TestCaseName != "" { + resultsByTestsFile[result.TestsFilePath][result.RuleID] = append( + resultsByTestsFile[result.TestsFilePath][result.RuleID], result) + } + } + } + report := []string{} + for testsFile, resultsByRule := range resultsByTestsFile { + totalTestsInFile := len(resultsByRule) + passedTestsInFile := 0 + testsFileReport := []string{} + testsFileSummary := fmt.Sprintf("- %s%s%%d/%%d PASSED", + filepath.Base(testsFile), strings.Repeat(" ", justifyLen-len(filepath.Base(testsFile))-2)) + testsReport := []string{} + for ruleID, resultsByTCs := range resultsByRule { + totalTestCasesInTest := len(resultsByTCs) + passedTestCasesInTest := 0 + testReport := []string{} + testSummary := fmt.Sprintf("%+2s %s%s%%d/%%d PASSED", + "-", ruleID, strings.Repeat(" ", justifyLen-len(ruleID)-3)) + testCaseReport := []string{} + for _, tcResult := range resultsByTCs { + if !tcResult.Passed { + reasons := []string{} + for _, reason := range tcResult.FailureReasons { + reasons = append(reasons, fmt.Sprintf("%+6s %s", "-", reason)) + } + for _, debugInfo := range tcResult.DebugInfo { + reasons = append(reasons, fmt.Sprintf("%+6s %s", "-", debugInfo)) + } + testCaseReport = append(testCaseReport, + fmt.Sprintf("%+4s %s%sFAILED", "-", + tcResult.TestCaseName, strings.Repeat(" ", justifyLen-len(tcResult.TestCaseName)-5))) + testCaseReport = append(testCaseReport, reasons...) + } else { + passedTestCasesInTest += 1 + } + } + if passedTestCasesInTest == totalTestCasesInTest { + passedTestsInFile += 1 + } + testReport = append(testReport, + fmt.Sprintf(testSummary, passedTestCasesInTest, totalTestCasesInTest)) + testReport = append(testReport, testCaseReport...) + testsReport = append(testsReport, testReport...) + } + testsFileReport = append(testsFileReport, + fmt.Sprintf(testsFileSummary, passedTestsInFile, totalTestsInFile)) + testsFileReport = append(testsFileReport, testsReport...) + report = append(report, testsFileReport...) + } + for testsFile, errs := range errorsByTestsFile { + errorReport := []string{fmt.Sprintf("- %s FAILED", filepath.Base(testsFile))} + for _, e := range errs { + errorReport = append(errorReport, fmt.Sprintf("%+2s %s", "-", e)) + } + report = append(report, errorReport...) 
+ } + fmt.Fprintln(w, strings.Join(report, "\n")) +} + +func AnyFailed(results []Result) bool { + for _, res := range results { + if len(res.FailureReasons) > 0 { + return true + } + } + return false +} diff --git a/pkg/testing/runner.go b/pkg/testing/runner.go new file mode 100644 index 0000000..c0ad575 --- /dev/null +++ b/pkg/testing/runner.go @@ -0,0 +1,539 @@ +package testing + +import ( + "context" + "encoding/json" + "fmt" + "io" + "os" + "os/exec" + "path" + "path/filepath" + "sort" + "strings" + "sync" + + "github.com/bombsimon/logrusr/v3" + "github.com/go-logr/logr" + "github.com/konveyor-ecosystem/kantra/pkg/container" + "github.com/konveyor/analyzer-lsp/output/v1/konveyor" + "github.com/konveyor/analyzer-lsp/provider" + "github.com/sirupsen/logrus" + "gopkg.in/yaml.v3" +) + +// Runner given a list of TestsFile and a TestOptions +// runs the tests, computes and returns results +type Runner interface { + Run([]TestsFile, TestOptions) ([]Result, error) +} + +type TestOptions struct { + TempDir string + LoudOutput bool + BaseProviderConfig []provider.Config + RunLocal bool + ContainerImage string + ProgressPrinter ResultPrinter +} + +// TODO (pgaikwad): we need to move the default config to a common place +// to be shared between kantra analyze command and this +var defaultProviderConfig = []provider.Config{ + { + Name: "java", + BinaryPath: "/jdtls/bin/jdtls", + InitConfig: []provider.InitConfig{ + { + AnalysisMode: provider.FullAnalysisMode, + ProviderSpecificConfig: map[string]interface{}{ + "bundles": "/jdtls/java-analyzer-bundle/java-analyzer-bundle.core/target/java-analyzer-bundle.core-1.0.0-SNAPSHOT.jar", + "depOpenSourceLabelsFile": "/usr/local/etc/maven.default.index", + provider.LspServerPathConfigKey: "/jdtls/bin/jdtls", + }, + }, + }, + }, + { + Name: "builtin", + InitConfig: []provider.InitConfig{{Location: ""}}, + }, + { + Name: "go", + BinaryPath: "/usr/bin/generic-external-provider", + InitConfig: []provider.InitConfig{ + { + AnalysisMode: provider.FullAnalysisMode, + ProviderSpecificConfig: map[string]interface{}{ + "lspServerName": "generic", + provider.LspServerPathConfigKey: "/root/go/bin/gopls", + "dependencyProviderPath": "/usr/bin/golang-dependency-provider", + }, + }, + }, + }, + { + Name: "python", + BinaryPath: "/usr/bin/generic-external-provider", + InitConfig: []provider.InitConfig{ + { + AnalysisMode: provider.FullAnalysisMode, + ProviderSpecificConfig: map[string]interface{}{ + "lspServerName": "pylsp", + provider.LspServerPathConfigKey: "/usr/local/bin/pylsp", + "workspaceFolders": []string{}, + "dependencyFolders": []string{}, + }, + }, + }, + }, + { + Name: "nodejs", + BinaryPath: "/usr/bin/generic-external-provider", + InitConfig: []provider.InitConfig{ + { + AnalysisMode: provider.FullAnalysisMode, + ProviderSpecificConfig: map[string]interface{}{ + "lspServerName": "nodejs", + provider.LspServerPathConfigKey: "/usr/local/bin/typescript-language-server", + "lspServerArgs": []string{"--stdio"}, + "workspaceFolders": []string{}, + "dependencyFolders": []string{}, + }, + }, + }, + }, + { + Name: "yaml", + BinaryPath: "/usr/bin/yq-external-provider", + InitConfig: []provider.InitConfig{ + { + AnalysisMode: provider.FullAnalysisMode, + ProviderSpecificConfig: map[string]interface{}{ + "name": "yq", + provider.LspServerPathConfigKey: "/usr/bin/yq", + }, + }, + }, + }, +} + +func NewRunner() Runner { + return defaultRunner{} +} + +// defaultRunner runs tests one file at a time +// groups tests within a file by analysisParams +type defaultRunner struct{} + 
+type workerInput struct { + testsFile TestsFile + opts TestOptions +} + +func (r defaultRunner) Run(testFiles []TestsFile, opts TestOptions) ([]Result, error) { + workerInputChan := make(chan workerInput, len(testFiles)) + resChan := make(chan []Result) + + wg := &sync.WaitGroup{} + + workerCount := 5 + // when running in container, we don't want to mount + // same base volumes concurrently in two different places + if !opts.RunLocal { + workerCount = 1 + } + // setup workers + for idx := 0; idx < workerCount; idx += 1 { + wg.Add(1) + go runWorker(wg, workerInputChan, resChan) + } + // send input + go func() { + for idx := range testFiles { + testFile := testFiles[idx] + workerInputChan <- workerInput{ + testsFile: testFile, + opts: opts, + } + } + close(workerInputChan) + }() + // wait for workers to finish + go func() { + wg.Wait() + close(resChan) + }() + // process results + results := []Result{} + anyFailed := false + anyErrored := false + // sorting for stability of unit tests + defer sort.Slice(results, func(i, j int) bool { + return strings.Compare(results[i].RuleID, results[j].RuleID) > 0 + }) + resultWg := sync.WaitGroup{} + resultWg.Add(1) + go func() { + defer resultWg.Done() + for res := range resChan { + if opts.ProgressPrinter != nil { + opts.ProgressPrinter(os.Stdout, res) + } + for _, r := range res { + if r.Error != nil { + anyErrored = true + } + if !r.Passed { + anyFailed = true + } + } + results = append(results, res...) + } + }() + resultWg.Wait() + if anyErrored { + return results, fmt.Errorf("failed to execute one or more tests") + } + if anyFailed { + return results, fmt.Errorf("one or more tests failed") + } + return results, nil +} + +func runWorker(wg *sync.WaitGroup, inChan chan workerInput, outChan chan []Result) { + defer wg.Done() + for input := range inChan { + results := []Result{} + // users can override the base provider settings file + baseProviderConfig := defaultProviderConfig + if input.opts.BaseProviderConfig != nil { + baseProviderConfig = input.opts.BaseProviderConfig + } + // within a tests file, we group tests by analysis params + testGroups := groupTestsByAnalysisParams(input.testsFile.Tests) + for _, tests := range testGroups { + tempDir, err := os.MkdirTemp(input.opts.TempDir, "rules-test-") + if err != nil { + results = append(results, Result{ + TestsFilePath: input.testsFile.Path, + Error: fmt.Errorf("failed creating temp dir - %w", err)}) + continue + } + // print analysis logs to a file + logFile, err := os.OpenFile(filepath.Join(tempDir, "analysis.log"), os.O_CREATE|os.O_APPEND|os.O_RDWR, 0644) + if err != nil { + results = append(results, Result{ + TestsFilePath: input.testsFile.Path, + Error: fmt.Errorf("failed creating a log file - %w", err)}) + logFile.Close() + continue + } + baseLogger := logrus.New() + baseLogger.SetOutput(logFile) + baseLogger.SetLevel(logrus.InfoLevel) + logger := logrusr.New(baseLogger) + // write rules + err = ensureRules(input.testsFile.RulesPath, tempDir, tests) + if err != nil { + results = append(results, Result{ + TestsFilePath: input.testsFile.Path, + Error: fmt.Errorf("failed writing rules - %w", err)}) + logFile.Close() + continue + } + analysisParams := tests[0].TestCases[0].AnalysisParams + // write provider settings file + volumes, err := ensureProviderSettings(tempDir, input.opts.RunLocal, input.testsFile, baseProviderConfig, analysisParams) + if err != nil { + results = append(results, Result{ + TestsFilePath: input.testsFile.Path, + Error: fmt.Errorf("failed writing provider settings - %w", 
err)}) + logFile.Close() + continue + } + volumes[tempDir] = "/shared/" + reproducerCmd := "" + switch { + case input.opts.RunLocal: + if reproducerCmd, err = runLocal(logFile, tempDir, analysisParams); err != nil { + results = append(results, Result{ + TestsFilePath: input.testsFile.Path, + Error: err}) + logFile.Close() + continue + } + default: + if reproducerCmd, err = runInContainer(logger, input.opts.ContainerImage, logFile, volumes, analysisParams); err != nil { + results = append(results, Result{ + TestsFilePath: input.testsFile.Path, + Error: err}) + logFile.Close() + continue + } + } + // write reproducer command to a file + os.WriteFile(filepath.Join(tempDir, "reproducer.sh"), []byte(reproducerCmd), 0755) + // process output + outputRulesets := []konveyor.RuleSet{} + content, err := os.ReadFile(filepath.Join(tempDir, "output.yaml")) + if err != nil { + results = append(results, Result{ + TestsFilePath: input.testsFile.Path, + Error: fmt.Errorf("failed reading output - %w", err)}) + logFile.Close() + continue + } + err = yaml.Unmarshal(content, &outputRulesets) + if err != nil { + results = append(results, Result{ + TestsFilePath: input.testsFile.Path, + Error: fmt.Errorf("failed unmarshaling output %s", filepath.Join(tempDir, "output.yaml"))}) + logFile.Close() + continue + } + anyFailed := false + groupResults := []Result{} + for _, test := range tests { + for _, tc := range test.TestCases { + result := Result{ + TestsFilePath: input.testsFile.Path, + RuleID: test.RuleID, + TestCaseName: tc.Name, + } + result.FailureReasons = tc.Verify(outputRulesets[0]) + if len(result.FailureReasons) == 0 { + result.Passed = true + } else { + anyFailed = true + result.DebugInfo = append(result.DebugInfo, + fmt.Sprintf("find debug data in %s", tempDir)) + } + groupResults = append(groupResults, result) + } + } + results = append(results, groupResults...) + if !anyFailed { + os.RemoveAll(tempDir) + } + logFile.Close() + } + outChan <- results + } +} + +func runLocal(logFile io.Writer, dir string, analysisParams AnalysisParams) (string, error) { + // run analysis in a container + args := []string{ + "--provider-settings", + filepath.Join(dir, "provider_settings.json"), + "--output-file", + filepath.Join(dir, "output.yaml"), + "--rules", + filepath.Join(dir, "rules.yaml"), + } + if analysisParams.DepLabelSelector != "" { + args = append(args, []string{ + "--dep-label-selector", + analysisParams.DepLabelSelector, + }...) + } + cmd := exec.Command("konveyor-analyzer", args...) + cmd.Stdout = logFile + cmd.Stderr = logFile + return fmt.Sprintf("konveyor-analyzer", strings.Join(args, " ")), cmd.Run() +} + +func runInContainer(consoleLogger logr.Logger, image string, logFile io.Writer, volumes map[string]string, analysisParams AnalysisParams) (string, error) { + if image == "" { + image = "quay.io/konveyor/analyzer-lsp:latest" + } + // run analysis in a container + args := []string{ + "--provider-settings", + "/shared/provider_settings.json", + "--output-file", + "/shared/output.yaml", + "--rules", + "/shared/rules.yaml", + } + if analysisParams.DepLabelSelector != "" { + args = append(args, []string{ + "--dep-label-selector", + analysisParams.DepLabelSelector, + }...) 
+ } + reproducerCmd := "" + err := container.NewContainer().Run( + context.TODO(), + container.WithImage(image), + container.WithLog(consoleLogger), + container.WithEntrypointBin("konveyor-analyzer"), + container.WithEntrypointArgs(args...), + container.WithVolumes(volumes), + container.WithWorkDir("/shared/"), + container.WithStderr(logFile), + container.WithStdout(logFile), + container.WithReproduceCmd(&reproducerCmd), + ) + if err != nil { + return reproducerCmd, fmt.Errorf("failed running analysis - %w", err) + } + return reproducerCmd, nil +} + +func ensureRules(rulesPath string, tempDirPath string, group []Test) error { + allRules := []map[string]interface{}{} + neededRules := map[string]interface{}{} + for _, test := range group { + neededRules[test.RuleID] = nil + } + content, err := os.ReadFile(rulesPath) + if err != nil { + return fmt.Errorf("failed to read rules file %s (%w)", rulesPath, err) + } + err = yaml.Unmarshal(content, &allRules) + if err != nil { + return fmt.Errorf("error unmarshaling rules at path %s (%w)", rulesPath, err) + } + foundRules := []map[string]interface{}{} + for neededRule := range neededRules { + found := false + for _, foundRule := range allRules { + if foundRule["ruleID"] == neededRule { + found = true + foundRules = append(foundRules, foundRule) + break + } + } + if !found { + return fmt.Errorf("rule %s not found in file %s", neededRule, rulesPath) + } + } + + content, err = yaml.Marshal(foundRules) + if err != nil { + return fmt.Errorf("failed marshaling rules - %w", err) + } + err = os.WriteFile(filepath.Join(tempDirPath, "rules.yaml"), content, 0644) + if err != nil { + return fmt.Errorf("failed writing rules file - %w", err) + } + return nil +} + +func ensureProviderSettings(tempDirPath string, runLocal bool, testsFile TestsFile, baseProviders []provider.Config, params AnalysisParams) (map[string]string, error) { + final := []provider.Config{} + volumes := map[string]string{} + // we need to get data paths defined in the tests file to populate location fields in provider settings + // depending on whether we run locally, or in a container, we will either use local paths or mounted paths + switch { + case runLocal: + // when running locally, we use the paths as-is + for _, override := range testsFile.Providers { + dataPath := filepath.Join(filepath.Dir(testsFile.Path), filepath.Clean(override.DataPath)) + for idx := range baseProviders { + base := &baseProviders[idx] + if base.Name == override.Name { + initConf := &base.InitConfig[0] + base.ContextLines = 100 + initConf.AnalysisMode = params.Mode + switch base.Name { + case "python", "go", "nodejs": + initConf.ProviderSpecificConfig["workspaceFolders"] = []string{dataPath} + default: + initConf.Location = dataPath + } + final = append(final, *base) + } + } + } + default: + // in containers, we need to make sure we only mount unique path trees + // to avoid mounting a directory and its subdirectory to two different paths + uniqueTrees := map[string]bool{} + toDelete := []string{} + for _, prov := range testsFile.Providers { + found := false + for tree := range uniqueTrees { + if tree != prov.DataPath && (strings.Contains(tree, prov.DataPath) || strings.Contains(prov.DataPath, tree)) { + found = true + if len(tree) > len(prov.DataPath) { + toDelete = append(toDelete, tree) + uniqueTrees[prov.DataPath] = true + } else { + toDelete = append(toDelete, prov.DataPath) + uniqueTrees[tree] = true + } + } + } + if !found { + uniqueTrees[prov.DataPath] = true + } + } + for _, key := range toDelete { + 
delete(uniqueTrees, key) + } + for uniquePath := range uniqueTrees { + volumes[filepath.Join(filepath.Dir(testsFile.Path), uniquePath)] = path.Join("/data", uniquePath) + } + for _, override := range testsFile.Providers { + mountedDataPath := path.Join("/data", filepath.Clean(override.DataPath)) + for idx := range baseProviders { + base := &baseProviders[idx] + base.ContextLines = 100 + if base.Name == override.Name { + initConf := &base.InitConfig[0] + initConf.AnalysisMode = params.Mode + switch base.Name { + case "python", "go", "nodejs": + initConf.ProviderSpecificConfig["workspaceFolders"] = []string{mountedDataPath} + default: + initConf.Location = mountedDataPath + } + final = append(final, *base) + } + } + } + } + content, err := json.Marshal(final) + if err != nil { + return nil, fmt.Errorf("failed marshaling provider settings - %w", err) + } + err = os.WriteFile(filepath.Join(tempDirPath, "provider_settings.json"), content, 0644) + if err != nil { + return nil, fmt.Errorf("failed writing provider settings file - %w", err) + } + return volumes, nil +} + +func groupTestsByAnalysisParams(tests []Test) [][]Test { + grouped := map[string]map[string]*Test{} + for _, t := range tests { + testKey := t.RuleID + for _, tc := range t.TestCases { + paramsKey := fmt.Sprintf("%s-%s", + tc.AnalysisParams.DepLabelSelector, tc.AnalysisParams.Mode) + if _, ok := grouped[paramsKey]; !ok { + grouped[paramsKey] = map[string]*Test{} + } + if _, ok := grouped[paramsKey][testKey]; !ok { + grouped[paramsKey][testKey] = &Test{ + RuleID: t.RuleID, + TestCases: []TestCase{}, + } + } + grouped[paramsKey][testKey].TestCases = append( + grouped[paramsKey][testKey].TestCases, tc) + } + } + groupedList := [][]Test{} + for _, tests := range grouped { + currentGroup := []Test{} + for _, v := range tests { + currentGroup = append(currentGroup, *v) + } + groupedList = append(groupedList, currentGroup) + } + return groupedList +} diff --git a/pkg/testing/runner_test.go b/pkg/testing/runner_test.go new file mode 100644 index 0000000..09da483 --- /dev/null +++ b/pkg/testing/runner_test.go @@ -0,0 +1,44 @@ +package testing + +import ( + "os" + "testing" +) + +// ENV_RUN_LOCAL enables the runner to run analysis locally instead of a container +// this must be set in CI containers to make sure we are not launching containers +const ENV_RUN_LOCAL = "RUN_LOCAL" +const RUNNER_IMG = "RUNNER_IMG" + +func Test_defaultRunner_Run(t *testing.T) { + tests := []struct { + name string + testFiles []string + wantErr bool + wantResult []Result + }{ + { + name: "simple test", + testFiles: []string{ + "./examples/ruleset/discovery.test.yaml", + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testFiles, err := Parse(tt.testFiles, nil) + if err != nil { + t.Errorf("failed setting up test") + } + _, runLocal := os.LookupEnv(ENV_RUN_LOCAL) + r := NewRunner() + _, err = r.Run(testFiles, TestOptions{ + RunLocal: runLocal, + ContainerImage: os.Getenv(RUNNER_IMG), + }) + if (err == nil) != tt.wantErr { + t.Errorf("runner.Run() expected no error, got error %v", err) + } + }) + } +} From 7aa2c5a9c4b369ffec041801a8db214a1ac0aa3d Mon Sep 17 00:00:00 2001 From: Pranav Gaikwad Date: Wed, 28 Feb 2024 08:59:11 -0500 Subject: [PATCH 4/9] :sparkles: add examples Signed-off-by: Pranav Gaikwad --- pkg/testing/examples/rules-file.test.yaml | 10 ++ pkg/testing/examples/rules-file.yaml | 29 +++ .../examples/ruleset/discovery.test.yaml | 19 ++ pkg/testing/examples/ruleset/discovery.yaml | 19 ++ 
.../examples/ruleset/local-storage.test.yml | 13 ++ .../examples/ruleset/local-storage.yml | 30 ++++ .../ruleset/test-data/java/Dockerfile | 26 +++ .../examples/ruleset/test-data/java/Makefile | 42 +++++ .../examples/ruleset/test-data/java/pom.xml | 165 ++++++++++++++++++ .../OrderManagementAppInitializer.java | 31 ++++ .../config/PersistenceConfig.java | 74 ++++++++ .../ordermanagement/config/WebConfig.java | 11 ++ .../controller/CustomerController.java | 40 +++++ .../exception/ResourceNotFoundException.java | 27 +++ .../handler/ExceptionHandlingController.java | 23 +++ .../demo/ordermanagement/model/Customer.java | 98 +++++++++++ .../repository/CustomerRepository.java | 8 + .../service/CustomerService.java | 36 ++++ .../service/ICustomerService.java | 12 ++ .../java/src/main/resources/import.sql | 5 + .../src/main/resources/persistence.properties | 7 + .../ruleset/test-data/python/file_a.py | 6 + .../ruleset/test-data/python/file_b.py | 9 + .../examples/ruleset/test-data/python/main.py | 25 +++ .../ruleset/test-data/python/requirements.txt | 24 +++ .../examples/ruleset/testing-config.yaml | 9 + 26 files changed, 798 insertions(+) create mode 100644 pkg/testing/examples/rules-file.test.yaml create mode 100644 pkg/testing/examples/rules-file.yaml create mode 100644 pkg/testing/examples/ruleset/discovery.test.yaml create mode 100644 pkg/testing/examples/ruleset/discovery.yaml create mode 100644 pkg/testing/examples/ruleset/local-storage.test.yml create mode 100644 pkg/testing/examples/ruleset/local-storage.yml create mode 100644 pkg/testing/examples/ruleset/test-data/java/Dockerfile create mode 100644 pkg/testing/examples/ruleset/test-data/java/Makefile create mode 100644 pkg/testing/examples/ruleset/test-data/java/pom.xml create mode 100644 pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/OrderManagementAppInitializer.java create mode 100644 pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/config/PersistenceConfig.java create mode 100644 pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/config/WebConfig.java create mode 100644 pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/controller/CustomerController.java create mode 100644 pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/exception/ResourceNotFoundException.java create mode 100644 pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/exception/handler/ExceptionHandlingController.java create mode 100644 pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/model/Customer.java create mode 100644 pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/repository/CustomerRepository.java create mode 100644 pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/service/CustomerService.java create mode 100644 pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/service/ICustomerService.java create mode 100644 pkg/testing/examples/ruleset/test-data/java/src/main/resources/import.sql create mode 100644 pkg/testing/examples/ruleset/test-data/java/src/main/resources/persistence.properties create mode 100644 pkg/testing/examples/ruleset/test-data/python/file_a.py create mode 100644 pkg/testing/examples/ruleset/test-data/python/file_b.py create mode 100644 
pkg/testing/examples/ruleset/test-data/python/main.py create mode 100644 pkg/testing/examples/ruleset/test-data/python/requirements.txt create mode 100644 pkg/testing/examples/ruleset/testing-config.yaml diff --git a/pkg/testing/examples/rules-file.test.yaml b/pkg/testing/examples/rules-file.test.yaml new file mode 100644 index 0000000..00904cb --- /dev/null +++ b/pkg/testing/examples/rules-file.test.yaml @@ -0,0 +1,10 @@ +tests: + - ruleID: storage-000 + testCases: + - name: tc-00 + hasIncidents: + locations: + - lineNumber: 7 + fileURI: ./test-data/java/src/main/resources/persistence.properties + messageMatches: file://common.properties + codeSnipMatches: file://common.properties \ No newline at end of file diff --git a/pkg/testing/examples/rules-file.yaml b/pkg/testing/examples/rules-file.yaml new file mode 100644 index 0000000..88228fd --- /dev/null +++ b/pkg/testing/examples/rules-file.yaml @@ -0,0 +1,29 @@ +- ruleID: storage-000 + description: Hardcoded local files in properties + labels: + - component=storage + message: Found access to a local file {{ matchingText }} + when: + builtin.filecontent: + filePattern: .*\.(\\\.java|\\\.properties|\\\.jsp|\\\.jspf|\\\.tag|[^pom]\\\.xml|\\\.txt) + pattern: file:// +- ruleID: storage-001 + labels: + - component=storage + message: Application may lose access to local storage in container environment + when: + or: + - java.referenced: + location: CONSTRUCTOR_CALL + pattern: java.io.(FileWriter|FileReader|PrintStream|File|PrintWriter|RandomAccessFile)* + - java.referenced: + location: METHOD_CALL + pattern: java.io.File.createTempFile* + - java.referenced: + location: METHOD_CALL + pattern: java.nio.file.Paths.get* + - python.referenced: + pattern: os_open + - python.referenced: + pattern: safe_load + \ No newline at end of file diff --git a/pkg/testing/examples/ruleset/discovery.test.yaml b/pkg/testing/examples/ruleset/discovery.test.yaml new file mode 100644 index 0000000..a59bfac --- /dev/null +++ b/pkg/testing/examples/ruleset/discovery.test.yaml @@ -0,0 +1,19 @@ +providers: +# override ruleset level path +- name: builtin + dataPath: ./test-data/python/ +tests: + - ruleID: language-discovery + testCases: + - name: tc-00 + hasTags: + - Python + - ruleID: kube-api-usage + testCases: + - name: tc-00 + description: kubernetes tag must be found + hasTags: + - Kubernetes + - name: tc-01 + hasIncidents: + exactly: 1 diff --git a/pkg/testing/examples/ruleset/discovery.yaml b/pkg/testing/examples/ruleset/discovery.yaml new file mode 100644 index 0000000..21ab175 --- /dev/null +++ b/pkg/testing/examples/ruleset/discovery.yaml @@ -0,0 +1,19 @@ +- ruleID: kube-api-usage + description: Discover Kubernetes api usage + labels: + - discovery + tag: + - Kubernetes + message: Python Kubernetes client used + when: + python.referenced: + pattern: "create_custom_resource_definition" +- ruleID: language-discovery + description: Found python files + labels: + - discovery + tag: + - Python + when: + builtin.file: + pattern: "*.py" diff --git a/pkg/testing/examples/ruleset/local-storage.test.yml b/pkg/testing/examples/ruleset/local-storage.test.yml new file mode 100644 index 0000000..81e8fcf --- /dev/null +++ b/pkg/testing/examples/ruleset/local-storage.test.yml @@ -0,0 +1,13 @@ +tests: + - ruleID: storage-000 + testCases: + - name: tc-00 + hasIncidents: + locations: + - fileURI: ./test-data/java/src/main/resources/persistence.properties + lineNumber: 7 + messageMatches: file://common.properties + codeSnipMatches: file://common.properties + + + \ No newline at end 
of file diff --git a/pkg/testing/examples/ruleset/local-storage.yml b/pkg/testing/examples/ruleset/local-storage.yml new file mode 100644 index 0000000..3f8182d --- /dev/null +++ b/pkg/testing/examples/ruleset/local-storage.yml @@ -0,0 +1,30 @@ +- ruleID: storage-000 + description: Hardcoded local files in properties + labels: + - component=storage + message: Found access to a local file {{ matchingText }} + when: + builtin.filecontent: + filePattern: .*(\.java|\.properties|\.jsp|\.jspf|\.tag|[^pom]\.xml|\.txt) + pattern: file://.* +- ruleID: storage-001 + description: Usage of Java local storage apis + labels: + - component=storage + message: Application may lose access to local storage in container environment + when: + or: + - java.referenced: + location: CONSTRUCTOR_CALL + pattern: java.io.(FileWriter|FileReader|PrintStream|File|PrintWriter|RandomAccessFile)* + - java.referenced: + location: METHOD_CALL + pattern: java.io.File.createTempFile* + - java.referenced: + location: METHOD_CALL + pattern: java.nio.file.Paths.get* + - python.referenced: + pattern: os_open + - python.referenced: + pattern: safe_load + \ No newline at end of file diff --git a/pkg/testing/examples/ruleset/test-data/java/Dockerfile b/pkg/testing/examples/ruleset/test-data/java/Dockerfile new file mode 100644 index 0000000..7b2e814 --- /dev/null +++ b/pkg/testing/examples/ruleset/test-data/java/Dockerfile @@ -0,0 +1,26 @@ +######################################## +# Build Image +######################################## +# FROM maven:3.6-jdk-8-slim as build +FROM maven:3.8-openjdk-11 as build + +WORKDIR /app + +# Establish the dependency layer +COPY pom.xml . +RUN mvn dependency:resolve + +# Add the source code and package +COPY src ./src +RUN mvn package + +######################################## +# Production Image +######################################## +# FROM tomcat:9-jdk8-openjdk-slim +FROM tomcat:9-jdk11-openjdk-slim + +COPY --from=build --chown=1001:0 /app/target/customers-tomcat-0.0.1-SNAPSHOT.war /usr/local/tomcat/webapps/ROOT.war + +EXPOSE 8080 +CMD ["catalina.sh", "run"] diff --git a/pkg/testing/examples/ruleset/test-data/java/Makefile b/pkg/testing/examples/ruleset/test-data/java/Makefile new file mode 100644 index 0000000..39349e4 --- /dev/null +++ b/pkg/testing/examples/ruleset/test-data/java/Makefile @@ -0,0 +1,42 @@ +# These can be overidden with environment variables of the same name +REGISTRY ?= quay.io +REPOSITORY ?= rofrano +IMAGE ?= customers-tomcat +TAG ?= 0.1 +LOCAL_IMAGE_NAME ?= $(IMAGE):$(TAG) +REMOTE_IMAGE_NAME ?= $(REGISTRY)/$(REPOSITORY)/$(IMAGE):$(TAG) + +all: build + +## help: Lists help on the commands +.PHONY: help +help: Makefile + @sed -ne '/@sed/!s/## //p' $(MAKEFILE_LIST) + +## Removes all dangling and built images +.PHONY: clean +clean: remove + $(info Removing all dangling build cache) + echo Y | docker image prune + +.PHONY: build +build: ## Build all of the project Docker images + $(info Building $(LOCAL_IMAGE_NAME) image...) + docker build -t $(LOCAL_IMAGE_NAME) . + +.PHONY: run +run: ## Run a vagrant VM using this image + $(info Bringing up $(LOCAL_IMAGE_NAME)...) + docker run --rm -p 8080:8080 $(LOCAL_IMAGE_NAME) + +.PHONY: remove +remove: ## Removes all built images + $(info Removing all built images...) + docker rmi $(REMOTE_IMAGE_NAME) + docker rmi $(LOCAL_IMAGE_NAME) + +.PHONY: push +push: ## Push image to docker repository + $(info Pushing $(LOCAL_IMAGE_NAME) image...) 
+ docker tag $(LOCAL_IMAGE_NAME) $(REMOTE_IMAGE_NAME) + docker push $(REMOTE_IMAGE_NAME) diff --git a/pkg/testing/examples/ruleset/test-data/java/pom.xml b/pkg/testing/examples/ruleset/test-data/java/pom.xml new file mode 100644 index 0000000..5fd20fd --- /dev/null +++ b/pkg/testing/examples/ruleset/test-data/java/pom.xml @@ -0,0 +1,165 @@ + + 4.0.0 + io.konveyor.demo + customers-tomcat + 0.0.1-SNAPSHOT + + Order Management + war + Remaining services for the legacy Order Management application + + + 1.8 + UTF-8 + UTF-8 + ${java.version} + ${java.version} + 5.3.7 + 9.0.46 + 2021.0.1 + 5.4.32.Final + 6.2.0.Final + + 42.2.20 + 2.12.3 + + 3.8.1 + 3.3.1 + 3.2.0 + + + + + + demo-config + Azure DevOps + https://pkgs.dev.azure.com/ShawnHurley21/demo-config-utils/_packaging/demo-config/maven/v1 + + + + + + + com.fasterxml.jackson + jackson-bom + ${jackson.version} + import + pom + + + org.springframework.data + spring-data-bom + ${spring-data.version} + import + pom + + + + + + org.apache.tomcat + tomcat-servlet-api + ${tomcat.version} + provided + + + com.fasterxml.jackson.core + jackson-core + + + com.fasterxml.jackson.core + jackson-databind + + + org.springframework.data + spring-data-jpa + + + + org.springframework + spring-jdbc + ${spring-framework.version} + + + org.springframework + spring-webmvc + ${spring-framework.version} + + + org.springframework + spring-web + ${spring-framework.version} + + + org.springframework.boot + spring-boot-starter-actuator + 2.5.0 + + + org.apache.tomcat + tomcat-jdbc + ${tomcat.version} + runtime + + + org.hibernate + hibernate-entitymanager + ${hibernate.version} + + + org.hibernate.validator + hibernate-validator + ${hibernate-validator.version} + + + ch.qos.logback + logback-classic + 1.1.7 + + + com.oracle.database.jdbc + ojdbc8 + 21.1.0.0 + + + org.postgresql + postgresql + 42.2.23 + + + + io.konveyor.demo + config-utils + 1.0.0 + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + ${maven-compiler-plugin.version} + + + org.apache.maven.plugins + maven-war-plugin + ${maven-war-plugin.version} + + false + + + + org.apache.maven.plugins + maven-resources-plugin + ${maven-resources-plugin.version} + + UTF-8 + + + + + + diff --git a/pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/OrderManagementAppInitializer.java b/pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/OrderManagementAppInitializer.java new file mode 100644 index 0000000..3fd3822 --- /dev/null +++ b/pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/OrderManagementAppInitializer.java @@ -0,0 +1,31 @@ +package io.konveyor.demo.ordermanagement; + +import javax.servlet.ServletContext; +import javax.servlet.ServletException; +import javax.servlet.ServletRegistration; + +import org.springframework.web.WebApplicationInitializer; +import org.springframework.web.context.ContextLoaderListener; +import org.springframework.web.context.support.AnnotationConfigWebApplicationContext; +import org.springframework.web.servlet.DispatcherServlet; + + +public class OrderManagementAppInitializer implements WebApplicationInitializer { + + @Override + public void onStartup(ServletContext container) throws ServletException { + AnnotationConfigWebApplicationContext context = new AnnotationConfigWebApplicationContext(); + context.setConfigLocation("io.konveyor.demo.ordermanagement.config"); + + context.scan("io.konveyor.demo.ordermanagement"); + container.addListener(new 
ContextLoaderListener(context)); + + ServletRegistration.Dynamic dispatcher = container + .addServlet("dispatcher", new DispatcherServlet(context)); + + dispatcher.setLoadOnStartup(1); + dispatcher.addMapping("/"); + + } + +} diff --git a/pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/config/PersistenceConfig.java b/pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/config/PersistenceConfig.java new file mode 100644 index 0000000..f4cc14d --- /dev/null +++ b/pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/config/PersistenceConfig.java @@ -0,0 +1,74 @@ +package io.konveyor.demo.ordermanagement.config; + +import java.util.Properties; + +import javax.sql.DataSource; + +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.dao.annotation.PersistenceExceptionTranslationPostProcessor; +import org.springframework.data.jpa.repository.config.EnableJpaRepositories; +import org.springframework.data.web.config.EnableSpringDataWebSupport; +import org.springframework.jdbc.datasource.DriverManagerDataSource; +import org.springframework.orm.jpa.JpaTransactionManager; +import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; +import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.annotation.EnableTransactionManagement; + +import io.konveyor.demo.config.ApplicationConfiguration; + +@Configuration +@EnableJpaRepositories(basePackages = { + "io.konveyor.demo.ordermanagement.repository" +}) +@EnableTransactionManagement +@EnableSpringDataWebSupport +public class PersistenceConfig { + + + @Bean + public LocalContainerEntityManagerFactoryBean entityManagerFactory() { + final LocalContainerEntityManagerFactoryBean em = new LocalContainerEntityManagerFactoryBean(); + em.setDataSource(dataSource()); + em.setPackagesToScan("io.konveyor.demo.ordermanagement.model"); + em.setJpaVendorAdapter(new HibernateJpaVendorAdapter()); + em.setJpaProperties(additionalProperties()); + + return em; + } + + @Bean + public DataSource dataSource() { + ApplicationConfiguration config = new ApplicationConfiguration(); + final DriverManagerDataSource dataSource = new DriverManagerDataSource(); + dataSource.setDriverClassName(config.getProperty("jdbc.driverClassName")); + dataSource.setUrl(config.getProperty("jdbc.url")); + dataSource.setUsername(config.getProperty("jdbc.user")); + dataSource.setPassword(config.getProperty("jdbc.password")); + + return dataSource; + } + + @Bean + public PlatformTransactionManager transactionManager() { + final JpaTransactionManager transactionManager = new JpaTransactionManager(); + transactionManager.setEntityManagerFactory(entityManagerFactory().getObject()); + return transactionManager; + } + + @Bean + public PersistenceExceptionTranslationPostProcessor exceptionTranslation() { + return new PersistenceExceptionTranslationPostProcessor(); + } + + final Properties additionalProperties() { + ApplicationConfiguration config = new ApplicationConfiguration(); + final Properties hibernateProperties = new Properties(); + hibernateProperties.setProperty("hibernate.hbm2ddl.auto", config.getProperty("hibernate.hbm2ddl.auto")); + hibernateProperties.setProperty("hibernate.dialect", config.getProperty("hibernate.dialect")); + 
hibernateProperties.setProperty("hibernate.cache.use_second_level_cache", "false"); + + return hibernateProperties; + } +} diff --git a/pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/config/WebConfig.java b/pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/config/WebConfig.java new file mode 100644 index 0000000..e234893 --- /dev/null +++ b/pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/config/WebConfig.java @@ -0,0 +1,11 @@ +package io.konveyor.demo.ordermanagement.config; + +import org.springframework.boot.autoconfigure.EnableAutoConfiguration; +import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration; +import org.springframework.context.annotation.Configuration; + +@Configuration +@EnableAutoConfiguration(exclude = { DataSourceAutoConfiguration.class }) +public class WebConfig { + +} diff --git a/pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/controller/CustomerController.java b/pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/controller/CustomerController.java new file mode 100644 index 0000000..7726cc8 --- /dev/null +++ b/pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/controller/CustomerController.java @@ -0,0 +1,40 @@ +package io.konveyor.demo.ordermanagement.controller; + +import org.jboss.logging.Logger; +import io.konveyor.demo.ordermanagement.exception.ResourceNotFoundException; +import io.konveyor.demo.ordermanagement.model.Customer; +import io.konveyor.demo.ordermanagement.service.CustomerService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; +import org.springframework.http.MediaType; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +@RestController +@RequestMapping("/customers") +public class CustomerController { + + @Autowired + private CustomerService customerService; + + private static Logger logger = Logger.getLogger( CustomerController.class.getName() ); + + @GetMapping(value = "/{id}", produces = MediaType.APPLICATION_JSON_VALUE) + public Customer getById(@PathVariable("id") Long id) { + Customer c = customerService.findById(id); + if (c == null) { + throw new ResourceNotFoundException("Requested order doesn't exist"); + } + logger.debug("Returning element: " + c); + return c; + } + + @RequestMapping + public Page findAll(Pageable pageable){ + return customerService.findAll(pageable); + } + +} diff --git a/pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/exception/ResourceNotFoundException.java b/pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/exception/ResourceNotFoundException.java new file mode 100644 index 0000000..628a1e4 --- /dev/null +++ b/pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/exception/ResourceNotFoundException.java @@ -0,0 +1,27 @@ +/* + * Copyright 2016-2017 Red Hat, Inc, and individual contributors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.konveyor.demo.ordermanagement.exception; + +public class ResourceNotFoundException extends RuntimeException { + + private static final long serialVersionUID = -8451289866722180029L; + + public ResourceNotFoundException(String message) { + super(message); + } + +} diff --git a/pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/exception/handler/ExceptionHandlingController.java b/pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/exception/handler/ExceptionHandlingController.java new file mode 100644 index 0000000..d783b9f --- /dev/null +++ b/pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/exception/handler/ExceptionHandlingController.java @@ -0,0 +1,23 @@ +package io.konveyor.demo.ordermanagement.exception.handler; + + +import org.jboss.logging.Logger; +import io.konveyor.demo.ordermanagement.exception.ResourceNotFoundException; +import org.springframework.http.HttpStatus; +import org.springframework.web.bind.annotation.ControllerAdvice; +import org.springframework.web.bind.annotation.ExceptionHandler; +import org.springframework.web.bind.annotation.ResponseStatus; + +@ControllerAdvice +public class ExceptionHandlingController { + + private static Logger logger = Logger.getLogger( ExceptionHandlingController.class.getName() ); + + @ResponseStatus(value = HttpStatus.NOT_FOUND, + reason = "Resource not found") + @ExceptionHandler(ResourceNotFoundException.class) + public void resourceNotFound(ResourceNotFoundException e) { + logger.warn("Resource not found: " + e.getMessage()); + } + +} diff --git a/pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/model/Customer.java b/pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/model/Customer.java new file mode 100644 index 0000000..2bb8d8c --- /dev/null +++ b/pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/model/Customer.java @@ -0,0 +1,98 @@ +package io.konveyor.demo.ordermanagement.model; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.SequenceGenerator; +import javax.persistence.Table; + +@Entity +@Table(name = "customers") +public class Customer { + @Id + @SequenceGenerator( + name = "customersSequence", + sequenceName = "customers_id_seq", + allocationSize = 1, + initialValue = 6) + @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "customersSequence") + private Long id; + + @Column(length = 20) + private String username; + + @Column(length = 20) + private String name; + + @Column(length = 40) + private String surname; + + @Column(length = 250) + private String address; + + @Column(name = "zipcode", length = 10) + private String zipCode; + + @Column(length = 40) + private String city; + + @Column(length = 40) + private String country; + + public Long getId() { + return id; + } + public void 
setId(Long id) { + this.id = id; + } + public String getUsername() { + return username; + } + public void setUsername(String username) { + this.username = username; + } + public String getName() { + return name; + } + public void setName(String name) { + this.name = name; + } + public String getSurname() { + return surname; + } + public void setSurname(String surname) { + this.surname = surname; + } + public String getAddress() { + return address; + } + public void setAddress(String address) { + this.address = address; + } + public String getZipCode() { + return zipCode; + } + public void setZipCode(String zipCode) { + this.zipCode = zipCode; + } + public String getCity() { + return city; + } + public void setCity(String city) { + this.city = city; + } + public String getCountry() { + return country; + } + public void setCountry(String country) { + this.country = country; + } + + @Override + public String toString() { + return "Customer [id=" + id + ", username=" + username + ", name=" + name + ", surname=" + surname + + ", address=" + address + ", zipCode=" + zipCode + ", city=" + city + ", country=" + country + "]"; + } +} diff --git a/pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/repository/CustomerRepository.java b/pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/repository/CustomerRepository.java new file mode 100644 index 0000000..b48fb4b --- /dev/null +++ b/pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/repository/CustomerRepository.java @@ -0,0 +1,8 @@ +package io.konveyor.demo.ordermanagement.repository; + +import io.konveyor.demo.ordermanagement.model.Customer; +import org.springframework.data.repository.PagingAndSortingRepository; + +public interface CustomerRepository extends PagingAndSortingRepository { + +} diff --git a/pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/service/CustomerService.java b/pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/service/CustomerService.java new file mode 100644 index 0000000..1a8d652 --- /dev/null +++ b/pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/service/CustomerService.java @@ -0,0 +1,36 @@ +package io.konveyor.demo.ordermanagement.service; + + +import org.jboss.logging.Logger; +import io.konveyor.demo.ordermanagement.model.Customer; +import io.konveyor.demo.ordermanagement.repository.CustomerRepository; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +@Service +@Transactional +public class CustomerService implements ICustomerService{ + + @Autowired + private CustomerRepository repository; + + private static Logger logger = Logger.getLogger( CustomerService.class.getName() ); + + public Customer findById(Long id) { + logger.debug("Entering CustomerService.findById()"); + Customer c = repository.findById(id).orElse(null); + logger.debug("Returning element: " + c); + return c; + } + + public PagefindAll(Pageable pageable) { + logger.debug("Entering CustomerService.findAll()"); + Page p = repository.findAll(pageable); + logger.debug("Returning element: " + p); + return p; + } + +} diff --git 
a/pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/service/ICustomerService.java b/pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/service/ICustomerService.java new file mode 100644 index 0000000..aceb1ec --- /dev/null +++ b/pkg/testing/examples/ruleset/test-data/java/src/main/java/io/konveyor/demo/ordermanagement/service/ICustomerService.java @@ -0,0 +1,12 @@ +package io.konveyor.demo.ordermanagement.service; + +import io.konveyor.demo.ordermanagement.model.Customer; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; + +public interface ICustomerService { + public Customer findById(Long id); + + public PagefindAll(Pageable pageable); + +} diff --git a/pkg/testing/examples/ruleset/test-data/java/src/main/resources/import.sql b/pkg/testing/examples/ruleset/test-data/java/src/main/resources/import.sql new file mode 100644 index 0000000..28c3a57 --- /dev/null +++ b/pkg/testing/examples/ruleset/test-data/java/src/main/resources/import.sql @@ -0,0 +1,5 @@ +INSERT INTO customers(id, username, name, surname, address, zipcode, city, country) VALUES (1, 'phlegm_master_19', 'Guybrush', 'Threepwood', '1060 West Addison', 'ME-001', 'Melee Town', 'Melee Island'); +INSERT INTO customers(id, username, name, surname, address, zipcode, city, country) VALUES (2, 'hate_guybrush', 'Pirate', 'Lechuck', 'Caverns of Meat, no number', 'MO-666', 'Giant Monkey Head', 'Monkey Island'); +INSERT INTO customers(id, username, name, surname, address, zipcode, city, country) VALUES (3, 'the_governor_em', 'Elaine', 'Marley', 'PO Box 1', 'BO-001', 'Ville de la Booty', 'Booty Island'); +INSERT INTO customers(id, username, name, surname, address, zipcode, city, country) VALUES (4, 'rescue_me', 'Herman', 'Toothrot', '1110 Gorgas Ave', '94129', 'Dinky Beach', 'Dinky Island'); +INSERT INTO customers(id, username, name, surname, address, zipcode, city, country) VALUES (5, 'i_rule_scabb', 'Largo', 'LaGrande', 'Swamp Rot Inn', 'SC-002', 'Woodtick', 'Scabb Island'); \ No newline at end of file diff --git a/pkg/testing/examples/ruleset/test-data/java/src/main/resources/persistence.properties b/pkg/testing/examples/ruleset/test-data/java/src/main/resources/persistence.properties new file mode 100644 index 0000000..d7a5297 --- /dev/null +++ b/pkg/testing/examples/ruleset/test-data/java/src/main/resources/persistence.properties @@ -0,0 +1,7 @@ +jdbc.driverClassName=oracle.jdbc.driver.OracleDriver +jdbc.url=jdbc:oracle:thin:@169.60.225.216:1521/XEPDB1 +jdbc.user=customers +jdbc.password=customers +hibernate.hbm2ddl.auto=create-drop +hibernate.dialect=org.hibernate.dialect.OracleDialect +common.properties=file://common.properties \ No newline at end of file diff --git a/pkg/testing/examples/ruleset/test-data/python/file_a.py b/pkg/testing/examples/ruleset/test-data/python/file_a.py new file mode 100644 index 0000000..4de0e38 --- /dev/null +++ b/pkg/testing/examples/ruleset/test-data/python/file_a.py @@ -0,0 +1,6 @@ +import file_b + +print(file_b.hello_world()) + +doggie = file_b.Dog() +print(doggie.speak()) \ No newline at end of file diff --git a/pkg/testing/examples/ruleset/test-data/python/file_b.py b/pkg/testing/examples/ruleset/test-data/python/file_b.py new file mode 100644 index 0000000..b640199 --- /dev/null +++ b/pkg/testing/examples/ruleset/test-data/python/file_b.py @@ -0,0 +1,9 @@ +def hello_world(): + return "Hello, world!" 
+ +class Dog(object): + def __init__(self) -> None: + pass + + def speak(self): + return "Woof!" \ No newline at end of file diff --git a/pkg/testing/examples/ruleset/test-data/python/main.py b/pkg/testing/examples/ruleset/test-data/python/main.py new file mode 100644 index 0000000..e50176b --- /dev/null +++ b/pkg/testing/examples/ruleset/test-data/python/main.py @@ -0,0 +1,25 @@ +# Source: https://github.com/kubernetes-client/python/blob/master/kubernetes/docs/ApiextensionsV1Api.md + +from __future__ import print_function +import kubernetes.client +from kubernetes.config import load_kube_config +from kubernetes.client.rest import ApiException +from pprint import pprint + +load_kube_config() + +# Enter a context with an instance of the API kubernetes.client +with kubernetes.client.ApiClient() as api_client: + # Create an instance of the API class + api_instance = kubernetes.client.ApiextensionsV1Api(api_client) + body = kubernetes.client.V1CustomResourceDefinition() # V1CustomResourceDefinition | + pretty = 'pretty_example' # str | If 'true', then the output is pretty printed. (optional) + dry_run = 'dry_run_example' # str | When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed (optional) + field_manager = 'field_manager_example' # str | fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. (optional) + field_validation = 'field_validation_example' # str | fieldValidation instructs the server on how to handle objects in the request (POST/PUT/PATCH) containing unknown or duplicate fields. Valid values are: - Ignore: This will ignore any unknown fields that are silently dropped from the object, and will ignore all but the last duplicate field that the decoder encounters. This is the default behavior prior to v1.23. - Warn: This will send a warning via the standard warning response header for each unknown field that is dropped from the object, and for each duplicate field that is encountered. The request will still succeed if there are no other errors, and will only persist the last of any duplicate fields. This is the default in v1.23+ - Strict: This will fail the request with a BadRequest error if any unknown fields would be dropped from the object, or if any duplicate fields are present. The error returned from the server will contain all unknown and duplicate fields encountered. 
(optional) + + try: + api_response = api_instance.create_custom_resource_definition(body, pretty=pretty, dry_run=dry_run, field_manager=field_manager, field_validation=field_validation) + pprint(api_response) + except ApiException as e: + print("Exception when calling ApiextensionsV1Api->create_custom_resource_definition: %s\n" % e) \ No newline at end of file diff --git a/pkg/testing/examples/ruleset/test-data/python/requirements.txt b/pkg/testing/examples/ruleset/test-data/python/requirements.txt new file mode 100644 index 0000000..69561e3 --- /dev/null +++ b/pkg/testing/examples/ruleset/test-data/python/requirements.txt @@ -0,0 +1,24 @@ +cachetools==5.3.1 +certifi==2023.7.22 +charset-normalizer==3.2.0 +docstring-to-markdown==0.12 +google-auth==2.23.0 +idna==3.4 +jedi==0.19.0 +kubernetes==28.1.0 +oauthlib==3.2.2 +parso==0.8.3 +pluggy==1.3.0 +pyasn1==0.5.0 +pyasn1-modules==0.3.0 +python-dateutil==2.8.2 +python-lsp-jsonrpc==1.1.1 +python-lsp-server==1.8.0 +PyYAML==6.0.1 +requests==2.31.0 +requests-oauthlib==1.3.1 +rsa==4.9 +six==1.16.0 +ujson==5.8.0 +urllib3==1.26.16 +websocket-client==1.6.3 diff --git a/pkg/testing/examples/ruleset/testing-config.yaml b/pkg/testing/examples/ruleset/testing-config.yaml new file mode 100644 index 0000000..0e61f88 --- /dev/null +++ b/pkg/testing/examples/ruleset/testing-config.yaml @@ -0,0 +1,9 @@ +providers: +- name: python + dataPath: ./test-data/python/ +- name: java + dataPath: ./test-data/java/ +- name: builtin + # this intentionally points to test-data + # so we can test overriden setting + dataPath: ./test-data/ \ No newline at end of file From 704cfb19add9807fdb999bc9f3cfe4190c0183a1 Mon Sep 17 00:00:00 2001 From: Pranav Gaikwad Date: Wed, 28 Feb 2024 08:59:55 -0500 Subject: [PATCH 5/9] :sparkles: add test runner to CLI Signed-off-by: Pranav Gaikwad --- cmd/root.go | 1 + cmd/settings.go | 1 + cmd/test.go | 52 +++++++++++++++++++++++++++++++++++++++++++ pkg/testing/runner.go | 2 +- 4 files changed, 55 insertions(+), 1 deletion(-) create mode 100644 cmd/test.go diff --git a/cmd/root.go b/cmd/root.go index 299f53c..ec1927c 100644 --- a/cmd/root.go +++ b/cmd/root.go @@ -46,6 +46,7 @@ func init() { logger := logrusr.New(logrusLog) rootCmd.AddCommand(NewTransformCommand(logger)) rootCmd.AddCommand(NewAnalyzeCmd(logger)) + rootCmd.AddCommand(NewTestCommand(logger)) rootCmd.AddCommand(NewVersionCommand()) } diff --git a/cmd/settings.go b/cmd/settings.go index ca84089..5f91e4b 100644 --- a/cmd/settings.go +++ b/cmd/settings.go @@ -26,6 +26,7 @@ type Config struct { PodmanBinary string `env:"PODMAN_BIN" default:"/usr/bin/podman"` RunnerImage string `env:"RUNNER_IMG" default:"quay.io/konveyor/kantra"` JvmMaxMem string `env:"JVM_MAX_MEM" default:""` + RunLocal bool `env:"RUN_LOCAL"` } func (c *Config) Load() error { diff --git a/cmd/test.go b/cmd/test.go new file mode 100644 index 0000000..951755a --- /dev/null +++ b/cmd/test.go @@ -0,0 +1,52 @@ +package cmd + +import ( + "os" + + "github.com/go-logr/logr" + "github.com/konveyor-ecosystem/kantra/pkg/testing" + "github.com/spf13/cobra" +) + +type testCommand struct { + testFilterString string + baseProviderSettings string +} + +func NewTestCommand(log logr.Logger) *cobra.Command { + testCmd := &testCommand{} + + testCobraCommand := &cobra.Command{ + Use: "test", + Short: "Test YAML rules", + RunE: func(cmd *cobra.Command, args []string) error { + var testFilter testing.TestsFilter + if testCmd.testFilterString != "" { + testFilter = testing.NewInlineNameBasedFilter(testCmd.testFilterString) + } + tests, err := 
testing.Parse(args, testFilter) + if err != nil { + log.Error(err, "failed parsing rulesets") + return err + } + if len(tests) == 0 { + log.Info("no tests found") + return nil + } + results, err := testing.NewRunner().Run(tests, testing.TestOptions{ + RunLocal: Settings.RunLocal, + ContainerImage: Settings.RunnerImage, + ProgressPrinter: testing.PrintProgress, + }) + testing.PrintSummary(os.Stdout, results) + if err != nil { + log.Error(err, "failed running tests") + return err + } + return nil + }, + } + testCobraCommand.Flags().StringVarP(&testCmd.testFilterString, "test-filter", "t", "", "filter tests / testcases by their names") + testCobraCommand.Flags().StringVarP(&testCmd.baseProviderSettings, "base-provider-settings", "b", "", "path to a provider settings file the runner will use as base") + return testCobraCommand +} diff --git a/pkg/testing/runner.go b/pkg/testing/runner.go index c0ad575..41a490e 100644 --- a/pkg/testing/runner.go +++ b/pkg/testing/runner.go @@ -341,7 +341,7 @@ func runLocal(logFile io.Writer, dir string, analysisParams AnalysisParams) (str cmd := exec.Command("konveyor-analyzer", args...) cmd.Stdout = logFile cmd.Stderr = logFile - return fmt.Sprintf("konveyor-analyzer", strings.Join(args, " ")), cmd.Run() + return fmt.Sprintf("konveyor-analyzer %s", strings.Join(args, " ")), cmd.Run() } func runInContainer(consoleLogger logr.Logger, image string, logFile io.Writer, volumes map[string]string, analysisParams AnalysisParams) (string, error) { From 03c9a40e23a8a2f1deef5310637f864ba547ff4f Mon Sep 17 00:00:00 2001 From: Pranav Gaikwad Date: Wed, 28 Feb 2024 09:00:17 -0500 Subject: [PATCH 6/9] :notebook: update docs Signed-off-by: Pranav Gaikwad --- README.md | 236 ++++++++++++++----------------- docs/{example.md => examples.md} | 0 docs/testrunner.md | 233 ++++++++++++++++++++++++++++++ 3 files changed, 338 insertions(+), 131 deletions(-) rename docs/{example.md => examples.md} (100%) create mode 100644 docs/testrunner.md diff --git a/README.md b/README.md index 9563118..a3efd42 100644 --- a/README.md +++ b/README.md @@ -3,100 +3,83 @@ Kantra is a CLI that unifies analysis and transformation capabilities of Konveyor. ## Installation -The easiest way to install Kantra is to get it via the container image. To download latest container image, run: -### Linux - -```sh -podman cp $(podman create --name kantra-download quay.io/konveyor/kantra:latest):/usr/local/bin/kantra . && podman rm kantra-download -``` - -### MacOS - -**Note:** There is a known [issue](https://github.com/containers/podman/issues/16106) -with limited number of open files in mounted volumes on MacOS, which may affect kantra performance. - -Prior to starting your podman machine, run: - -```sh -ulimit -n unlimited -``` - - - This must be run after each podman machine reboot. 
- -In order to correctly mount volumes, your podman machine must contain options: - -```sh -podman machine init -v $HOME:$HOME -v /private/tmp:/private/tmp -v /var/folders/:/var/folders/ -``` - -Increase podman resources: - -```sh -podman machine set --cpus 4 --memory 4096 -``` - - -Ensure that we use the connection to the VM `` we created earlier by default: - -```sh -podman system connection default -``` - -```sh -podman pull quay.io/konveyor/kantra:latest && podman run --name kantra-download quay.io/konveyor/kantra:latest 1> /dev/null 2> /dev/null && podman cp kantra-download:/usr/local/bin/darwin-kantra kantra && podman rm kantra-download -``` - -### Windows - -```sh -podman pull quay.io/konveyor/kantra:latest && podman run --name kantra-download quay.io/konveyor/kantra:latest 1> /dev/null 2> /dev/null && podman cp kantra-download:/usr/local/bin/windows-kantra kantra && podman rm kantra-download -``` - ---- - -The above will copy the binary into your current directory. Move it to PATH for system-wide use: - -```sh -sudo mv ./kantra /usr/local/bin/ -``` - -To confirm Kantra is installed, run: - -```sh -kantra --help -``` - -This should display the help message. +The easiest way to install Kantra is to get it via the container image. + +1. To download latest container image using _podman_, follow instructions for your operating system: + + * For Linux, run: + + ```sh + podman cp $(podman create --name kantra-download quay.io/konveyor/kantra:latest):/usr/local/bin/kantra . && podman rm kantra-download + ``` + + * For MacOS + + Prior to starting your podman machine, run: + + ```sh + ulimit -n unlimited + ``` + > This must be run after each podman machine reboot. + + Init your _podman_ machine : + + ```sh + podman machine init -v $HOME:$HOME -v /private/tmp:/private/tmp -v /var/folders/:/var/folders/ + ``` + + Increase podman resources: + + ```sh + podman machine set --cpus 4 --memory 4096 + ``` + + Ensure that we use the connection to the VM `` we created earlier by default: + + ```sh + podman system connection default + ``` + + Finally, run: + + ```sh + podman pull quay.io/konveyor/kantra:latest && podman run --name kantra-download quay.io/konveyor/kantra:latest 1> /dev/null 2> /dev/null && podman cp kantra-download:/usr/local/bin/darwin-kantra kantra && podman rm kantra-download + ``` + + * For Windows, run: + + ```sh + podman pull quay.io/konveyor/kantra:latest && podman run --name kantra-download quay.io/konveyor/kantra:latest 1> /dev/null 2> /dev/null && podman cp kantra-download:/usr/local/bin/windows-kantra kantra && podman rm kantra-download + ``` + +2. The above will copy the binary into your current directory. Move it to PATH for system-wide use: + + ```sh + sudo mv ./kantra /usr/local/bin/ + ``` + +3. To confirm Kantra is installed, run: + + ```sh + kantra --help + ``` + + This should display the help message. ## Usage -Kantra has two subcommands - `analyze` and `transform`: +Kantra has three subcommands: +1. _analyze_: This subcommand allows running source code analysis on input source code or a binary. -```sh -A cli tool for analysis and transformation of applications - -Usage: - kantra [command] - -Available Commands: - analyze Analyze application source code - completion Generate the autocompletion script for the specified shell - help Help about any command - transform Transform application source code or windup XML rules - -Flags: - -h, --help help for kantra - --log-level int log level (default 5) - -Use "kantra [command] --help" for more information about a command. 
-```
+2. _transform_: This subcommand allows converting XML rules to YAML and running OpenRewrite recipes on source code.
+3. _test_: This subcommand allows testing YAML rules.
 
 ### Analyze
 
-Analyze allows running source code and binary analysis using [analyzer-lsp](https://github.com/konveyor/analyzer-lsp)
+_analyze_ subcommand allows running source code and binary analysis using [analyzer-lsp](https://github.com/konveyor/analyzer-lsp)
 
 To run analysis on application source code, run:
 
@@ -104,14 +87,11 @@ To run analysis on application source code, run:
 kantra analyze --input=<path/to/source/or/binary> --output=<path/to/output/dir>
 ```
 
+_--input_ must point to a source code directory or a binary file, and _--output_ must point to a directory that will contain the analysis results.
+
 All flags:
 
 ```sh
-Analyze application source code
-
-Usage:
-  kantra analyze [flags]
-
 Flags:
       --analyze-known-libraries   analyze known open-source libraries
   -h, --help                      help for analyze
@@ -125,46 +105,35 @@ Flags:
       --skip-static-report        do not generate static report
   -s, --source stringArray        source technology to consider for analysis. Use multiple times for additional sources: --source <source_1> --source <source_2> ...
   -t, --target stringArray        target technology to consider for analysis. Use multiple times for additional targets: --target <target_1> --target <target_2> ...
-
-Global Flags:
-      --log-level int   log level (default 5)
 ```
 
 ### Transform
 
-Transform has two subcommands - `openrewrite` and `rules`.
+Transform has two subcommands:
 
-```sh
-Transform application source code or windup XML rules
+1. _openrewrite_: This subcommand allows running one or more available OpenRewrite recipes on input source code.
 
-Usage:
-  kantra transform [flags]
-  kantra transform [command]
+2. _rules_: This subcommand allows converting Windup XML rules into the analyzer-lsp YAML format.
 
-Available Commands:
-  openrewrite Transform application source code using OpenRewrite recipes
-  rules       Convert XML rules to YAML
+#### OpenRewrite
 
-Flags:
-  -h, --help   help for transform
+_openrewrite_ subcommand allows running [OpenRewrite](https://docs.openrewrite.org/) recipes on source code.
 
-Global Flags:
-      --log-level int   log level (default 5)
+To transform applications using OpenRewrite, run:
 
-Use "kantra transform [command] --help" for more information about a command.
+```sh
+kantra transform openrewrite --input=<path/to/source/code> --target=<openrewrite_recipe>
 ```
 
-#### OpenRewrite
-
-`openrewrite` subcommand allows running [OpenRewrite](https://docs.openrewrite.org/) recipes on source code.
-
+The value of the _--target_ option must be one of the available OpenRewrite recipes. To list all available recipes, run:
 
 ```sh
-Transform application source code using OpenRewrite recipes
+kantra transform openrewrite --list-targets
+```
 
-Usage:
-  kantra transform openrewrite [flags]
+All flags:
+
+```sh
 Flags:
   -g, --goal string             target goal (default "dryRun")
   -h, --help                    help for openrewrite
@@ -172,43 +141,48 @@ Flags:
   -l, --list-targets            list all available OpenRewrite recipes
   -s, --maven-settings string   path to a custom maven settings file to use
   -t, --target string           target openrewrite recipe to use. Run --list-targets to get a list of packaged recipes.
- -Global Flags: - --log-level int log level (default 5) ``` -To run `transform openrewrite` on application source code, run: +#### Rules + +_rules_ subcommand allows converting Windup XML rules to analyzer-lsp YAML rules using [windup-shim](https://github.com/konveyor/windup-shim) + +To convert Windup XML rules to the analyzer-lsp YAML format, run: ```sh -kantra transform openrewrite --input= --target= +kantra transform rules --input= --output= ``` -#### Rules +_--input_ flag should point to a file or a directory containing XML rules, _--output_ should point to an output directory for YAML rules. -`rules` subcommand allows converting Windup XML rules to analyzer-lsp YAML rules using [windup-shim](https://github.com/konveyor/windup-shim) +All flags: ```sh -Convert XML rules to YAML - -Usage: - kantra transform rules [flags] - Flags: -h, --help help for rules -i, --input stringArray path to XML rule file(s) or directory -o, --output string path to output directory - -Global Flags: - --log-level int log level (default 5) ``` -To run `transform rules` on application source code, run: +### Test + +_test_ subcommand allows running tests on YAML rules written for analyzer-lsp. + +The input to test runner will be one or more test files and / or directories containing tests written in YAML. ```sh -kantra transform rules --input= --output= +kantra test /path/to/a/single/tests/file.test.yaml ``` -### analyze and transform [examples](./docs/example.md) +The output of tests is printed on the console. + +See different ways to run the test command in the [test runner doc](./docs/testrunner.md#running-tests) + +## References + +- [Example usage scenarios](./docs/examples.md) +- [Test runner for YAML rules](./docs/testrunner.md) ## Code of Conduct + Refer to Konveyor's Code of Conduct [here](https://github.com/konveyor/community/blob/main/CODE_OF_CONDUCT.md). diff --git a/docs/example.md b/docs/examples.md similarity index 100% rename from docs/example.md rename to docs/examples.md diff --git a/docs/testrunner.md b/docs/testrunner.md new file mode 100644 index 0000000..b17d45d --- /dev/null +++ b/docs/testrunner.md @@ -0,0 +1,233 @@ +# Test Runner for YAML rules + +Via the _test_ subcommand, _kantra_ exposes a test runner. + +It allows testing YAML rules written for [analyzer-lsp](https://github.com/konveyor/analyzer-lsp). + +The input to the test runner are tests written in YAML, the output of the test runner is a report. + +## Usage + +This section covers: + +1. [Writing tests](#writing-tests) +2. [Running tests](#running-tests) +3. [Understanding output](#test-output) + +### Writing tests + +Tests for a rules file are written in a YAML file with names ending in `.test.yaml` suffix. + +A tests file contains three fields _rulesPath_, _providers_ and _tests_ at the top level: + +```yaml +rulesPath: "/optional/path/to/rules/file" +providers: + - name: "go" + dataPath: "/path/to/data/for/this/provider" +tests: + - ruleID: "rule-id-for-this-test" + testCases: + - name: "test-case-name" + [...] +``` + +* _rulesPath_: Relative path to a file containing rules these tests are applicable to +* _providers_: List of configs, each containing configuration for a specific provider to be used when running tests +* _tests_: List of tests to run, each containing test definition for a specific rule in the associated rules file + +> Note that _rulesPath_ is optional. 
If it is not specified, the runner will look for a file in the same directory with the same name as the tests file, except for the _.test.yaml_ suffix in the name.
+
+#### Defining providers
+
+The field _providers_ defines a list of configs, each specific to a provider:
+
+```yaml
+providers:
+  - name: <provider_name>
+    dataPath: <path/to/test/data>
+tests:
+  [...]
+```
+
+_name_ is the name of the provider to which the config applies, and _dataPath_ is the relative path to the test data to be used when testing rules for that provider.
+
+> Note that _dataPath_ must be relative to the directory in which the tests file exists.
+
+If all tests under a _ruleset_ share values of the _providers_ field (e.g. they use a common data directory in all tests for a given provider), this config can also be defined at the ruleset level under a special file `testing-config.yaml`. In that case, config present in this file will apply to all tests in that directory. A more specific config for a certain file can still be defined in the tests file. In that case, values in the tests file will take precedence over values at the _ruleset_ level.
+
+See an example of ruleset level config in [../pkg/testing/examples/ruleset/testing-config.yaml](../pkg/testing/examples/ruleset/testing-config.yaml).
+
+> Note that a config for every provider present in the rules file _must_ be defined.
+
+#### Defining tests
+
+The field _tests_ defines a list of tests, each specific to a rule in the rules file:
+
+```yaml
+providers:
+  [...]
+tests:
+  - ruleID: test-00
+    testCases:
+      - name: test-tc-00
+        analysisParams:
+          depLabelSelector: "!konveyor.io/source=open-source"
+          mode: "full"
+        hasIncidents:
+          exactly: 10
+          messageMatches: "test"
+          codeSnipMatches: "test"
+      - name: test-tc-01
+        analysisParams:
+          mode: "source-only"
+        hasTags:
+          - "test"
+        hasIncidents:
+          locations:
+            - lineNumber: 10
+              fileURI: file://test
+              messageMatches: "message"
+```
+
+###### Test
+
+| Field     | Type       | Required | Description                                    |
+| --------- | ---------- | -------- | ---------------------------------------------- |
+| ruleID    | string     | Yes      | ID of the rule this test applies to            |
+| testCases | []TestCase | Yes      | List of test cases (See [TestCase](#testcase)) |
+
+###### TestCase
+
+| Field          | Type           | Required | Description                                                                                                 |
+| -------------- | -------------- | -------- | ----------------------------------------------------------------------------------------------------------- |
+| name           | string         | Yes      | Unique name for the test case; can be used to filter test cases                                              |
+| analysisParams | AnalysisParams | Yes      | Analysis parameters to use when running this test case (See [AnalysisParams](#analysisparams))               |
+| hasIncidents   | HasIncidents   | No       | Passing criteria that compares produced incidents (See [HasIncidents](#hasincidents))                        |
+| hasTags        | []string       | No       | Passing criteria that compares produced tags; passes the test case when all tags are present in the output   |
+| isUnmatched    | bool           | No       | Passes the test case when the rule is NOT matched                                                             |
+
+###### AnalysisParams
+
+| Field            | Type   | Required | Description                                                                          |
+| ---------------- | ------ | -------- | ------------------------------------------------------------------------------------- |
+| depLabelSelector | string | No       | Dependency label selector expression to pass as --dep-label-selector to the analyzer   |
+| mode             | string | No       | Analysis mode, one of _source-only_ or _full_                                           |
+
+###### HasIncidents
+
+_HasIncidents_ defines the criteria for passing the test case. It provides two ways to define the criteria; only one of the two can be defined in a test case:
+
+1. _Count based_: This criteria is based on the count of incidents. It can be defined using the following fields under _hasIncidents_:
+
+    | Field           | Type   | Required | Description                                                                                  |
+    | --------------- | ------ | -------- | ---------------------------------------------------------------------------------------------- |
+    | exactly         | int    | No       | Produced incidents should be exactly equal to this number for the test case to pass             |
+    | atLeast         | int    | No       | Produced incidents should be greater than or equal to this number for the test case to pass     |
+    | atMost          | int    | No       | Produced incidents should be less than or equal to this number for the test case to pass        |
+    | messageMatches  | string | No       | In all incidents, the message should match this pattern for the test case to pass               |
+    | codeSnipMatches | string | No       | In all incidents, the code snippet should match this pattern for the test case to pass          |
+
+    > Only one of _exactly_, _atLeast_, or _atMost_ can be defined at a time.
+
+2. _Location based_: This criteria is based on the location of each incident. It can be defined using the following fields under _hasIncidents_:
+
+    | Field     | Type       | Required | Description                                                                              |
+    | --------- | ---------- | -------- | ------------------------------------------------------------------------------------------ |
+    | locations | []Location | No       | Passing criteria that is based on the location of each incident rather than just the count  |
+
+    Each _Location_ has the following fields:
+
+    | Field           | Type   | Required | Description                                                               |
+    | --------------- | ------ | -------- | ---------------------------------------------------------------------------- |
+    | fileURI         | string | Yes      | An incident must be found in this file for the test case to pass              |
+    | lineNumber      | int    | Yes      | An incident must be found on this line number for the test case to pass       |
+    | messageMatches  | string | No       | The message should match this pattern for the test case to pass               |
+    | codeSnipMatches | string | No       | The code snippet should match this pattern for the test case to pass          |
+
+### Running tests
+
+To run tests in a single file:
+
+```sh
+kantra test /path/to/a/single/tests/file.test.yaml
+```
+
+To run tests in a ruleset:
+
+```sh
+kantra test /path/to/a/ruleset/directory/
+```
+
+To run tests in multiple different paths:
+
+```sh
+kantra test /path/to/a/ruleset/directory/ /path/to/a/test/file.test.yaml
+```
+
+To run specific tests by rule IDs:
+
+```sh
+kantra test /path/to/a/ruleset/directory/ -t "RULE_ID_1, RULE_ID_2"
+```
+
+The _-t_ option allows specifying a comma-separated list of rule IDs to select specific tests.
+
+A specific test case in a test can also be selected using the _-t_ option.
+
+To run specific test cases in a test, each value in the comma-separated list passed to _-t_ becomes `<rule_id>#<test_case_name>`:
+
+```sh
+kantra test /path/to/a/ruleset/directory/ -t RULE_ID_1#TEST_CASE_1
+```
+
+> Note that `#` is a reserved character used to separate the test case name in the filter. The name of the test case itself _must not_ contain `#`.
+
+### Test Output
+
+When a test passes, the runner creates output that looks like:
+
+```sh
+- 156-java-rmi.windup.test.yaml  2/2 PASSED
+  - java-rmi-00000  1/1 PASSED
+  - java-rmi-00001  1/1 PASSED
+------------------------------------------------------------
+ Rules Summary:      2/2 (100.00%) PASSED
+ Test Cases Summary: 2/2 (100.00%) PASSED
+------------------------------------------------------------
+```
+
+The runner will clean up all temporary directories when all tests in a file pass.
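+
+For reference, passing output like the one above could come from a tests file along these lines. This is only an illustrative sketch: the rule IDs are taken from the sample output above, but the rules path, provider name, data path and passing criteria are assumptions, not the actual `156-java-rmi` tests.
+
+```yaml
+rulesPath: "../156-java-rmi.windup.yaml"
+providers:
+  - name: "java"
+    dataPath: "./test-data/java-rmi"
+tests:
+  - ruleID: java-rmi-00000
+    testCases:
+      - name: tc-1
+        analysisParams:
+          mode: "full"
+        hasIncidents:
+          atLeast: 1
+  - ruleID: java-rmi-00001
+    testCases:
+      - name: tc-1
+        analysisParams:
+          mode: "full"
+        hasIncidents:
+          atLeast: 1
+```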
+ +If a test fails, the runner will create output that looks like: + +```sh +- 160-local-storage.windup.test.yaml 0/1 PASSED + - local-storage-00001 0/1 PASSED + - tc-1 FAILED + - expected at least 48 incidents, got 18 + - find debug data in /tmp/rules-test-242432604 +------------------------------------------------------------ + Rules Summary: 0/1 (0.00%) PASSED + Test Cases Summary: 0/1 (0.00%) PASSED +------------------------------------------------------------ +``` + +In this case, the runner leaves the temporary directories behind for debugging. In the above example, the temporary directory is `/tmp/rules-test-242432604`. + +Among other files, the important files needed for debugging in this directory are: + +* _analysis.log_: This file contains the full log of analysis +* _output.yaml_: This file contains the output generated post analysis +* _provider\_settings.json_: This file contains the provider settings used for analysis +* _rules.yaml_: This file contains the rules used for analysis +* _reproducer.sh_: This file contains a command you can run directly on your system to reproduce the analysis as-is. + +> In the temporary directory, there could be files generated by the providers including their own logs. Those files can be useful for debugging too. + +### References + +- OpenAPI schema for tests: [Tests schema](../test-schema.json) + +- Example tests for a ruleset: [Ruleset tests](../pkg/testing/examples/ruleset/) + +- Example tests for a rules file: [Rules file tests](../pkg/testing/examples/rules-file.test.yaml) From f8ce12b2dc5891e37cdcd50c15ff916122e84947 Mon Sep 17 00:00:00 2001 From: Pranav Gaikwad Date: Wed, 28 Feb 2024 13:37:19 -0500 Subject: [PATCH 7/9] :ghost: dockerfile changes Signed-off-by: Pranav Gaikwad --- .github/workflows/testing.yaml | 7 ++++++- Dockerfile | 1 + 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/.github/workflows/testing.yaml b/.github/workflows/testing.yaml index 526057f..9ac5bb7 100644 --- a/.github/workflows/testing.yaml +++ b/.github/workflows/testing.yaml @@ -12,7 +12,11 @@ jobs: run: | podman build -t localhost/kantra:latest -f Dockerfile . go build -o kantra main.go - + + - name: Run unit tests + run: | + RUNNER_IMG=localhost/kantra:latest go test ./... 
+
      - name: Fetch sample applications
        run: |
          git clone https://github.com/konveyor/example-applications
@@ -22,6 +26,7 @@
        run: |
          RUNNER_IMG=localhost/kantra:latest ./kantra analyze --input $(pwd)/example-applications/example-1/ --output ./output/ --rules ./test-data/jni-native-code-test.windup.xml --target cloud-readiness
 
+      # TODO (pgaikwad): Change this to a yaml test and run `kantra test`
       - name: Fail if analysis output does not match expected
         run: |
           expected_file=./test-data/analysis-output.yaml
diff --git a/Dockerfile b/Dockerfile
index 166939f..ce25ac2 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -22,6 +22,7 @@ RUN go mod download
 # Copy the go source
 COPY main.go main.go
 COPY cmd/ cmd/
+COPY pkg/ pkg/
 
 # Build
 ARG VERSION

From 067af374a249ac3a74c2dc76922a612ee62fe92e Mon Sep 17 00:00:00 2001
From: Pranav Gaikwad
Date: Tue, 5 Mar 2024 12:15:35 -0500
Subject: [PATCH 8/9] address feedback

Signed-off-by: Pranav Gaikwad
---
 docs/testrunner.md                            |  4 +-
 pkg/container/container.go                    |  4 +-
 .../ruleset/test-data/python/requirements.txt |  3 -
 pkg/testing/result.go                         |  2 +-
 pkg/testing/runner.go                         | 83 ++++++++++++-------
 5 files changed, 57 insertions(+), 39 deletions(-)

diff --git a/docs/testrunner.md b/docs/testrunner.md
index b17d45d..73adbae 100644
--- a/docs/testrunner.md
+++ b/docs/testrunner.md
@@ -24,7 +24,7 @@ A tests file contains three fields _rulesPath_, _providers_ and _tests_ at the t
 rulesPath: "/optional/path/to/rules/file"
 providers:
   - name: "go"
-    dataPath: "/path/to/data/for/this/provider"
+    dataPath: "/path/to/test/data/for/this/provider"
 tests:
   - ruleID: "rule-id-for-this-test"
     testCases:
@@ -54,7 +54,7 @@ _name_ is the name of the provider to which the config applies, and _dataPath
 
 > Note that _dataPath_ must be relative to the directory in which the tests file exists.
 
-If all tests under a _ruleset_ share values of the _providers_ field (e.g. they use a common data directory in all tests for a given provider), this config can also be defined at the ruleset level under a special file `testing-config.yaml`. In that case, config present in this file will apply to all tests in that directory. A more specific config for a certain file can still be defined in the tests file. In that case, values in the tests file will take precedence over values at the _ruleset_ level.
+If all tests under a _ruleset_ share values of the _providers_ field (e.g. they use a common data directory in all tests for a given provider), this config can also be defined at the ruleset level under a special file `testing-config.yaml`. In that case, the config present in this file will apply to all tests in that directory. A more specific config for a certain file can still be defined in the tests file. In that case, values in the tests file will take precedence over values at the _ruleset_ level.
 
 See an example of ruleset level config in [../pkg/testing/examples/ruleset/testing-config.yaml](../pkg/testing/examples/ruleset/testing-config.yaml).
 
diff --git a/pkg/container/container.go b/pkg/container/container.go
index 1dac315..195cc3a 100644
--- a/pkg/container/container.go
+++ b/pkg/container/container.go
@@ -190,7 +190,9 @@ func (c *container) Run(ctx context.Context, opts ...Option) error {
 	args = append(args, c.entrypointArgs...)
} if c.reproducerCmd != nil { - *c.reproducerCmd = fmt.Sprintf("%s %s", c.containerRuntimeBin, strings.Join(args, " ")) + reproducer := strings.ReplaceAll(strings.Join(args, " "), " --rm", "") + *c.reproducerCmd = fmt.Sprintf("%s %s", + c.containerRuntimeBin, reproducer) } cmd := exec.CommandContext(ctx, c.containerRuntimeBin, args...) errBytes := &bytes.Buffer{} diff --git a/pkg/testing/examples/ruleset/test-data/python/requirements.txt b/pkg/testing/examples/ruleset/test-data/python/requirements.txt index 69561e3..c2da307 100644 --- a/pkg/testing/examples/ruleset/test-data/python/requirements.txt +++ b/pkg/testing/examples/ruleset/test-data/python/requirements.txt @@ -4,7 +4,6 @@ charset-normalizer==3.2.0 docstring-to-markdown==0.12 google-auth==2.23.0 idna==3.4 -jedi==0.19.0 kubernetes==28.1.0 oauthlib==3.2.2 parso==0.8.3 @@ -12,8 +11,6 @@ pluggy==1.3.0 pyasn1==0.5.0 pyasn1-modules==0.3.0 python-dateutil==2.8.2 -python-lsp-jsonrpc==1.1.1 -python-lsp-server==1.8.0 PyYAML==6.0.1 requests==2.31.0 requests-oauthlib==1.3.1 diff --git a/pkg/testing/result.go b/pkg/testing/result.go index 4a2424b..8f0d57e 100644 --- a/pkg/testing/result.go +++ b/pkg/testing/result.go @@ -38,7 +38,7 @@ func PrintSummary(w io.WriteCloser, results []Result) { } summaryByRules[result.RuleID].total += 1 tcSummary.total += 1 - if len(result.FailureReasons) == 0 { + if result.Passed { summaryByRules[result.RuleID].passed += 1 tcSummary.passed += 1 } diff --git a/pkg/testing/runner.go b/pkg/testing/runner.go index 41a490e..534a2be 100644 --- a/pkg/testing/runner.go +++ b/pkg/testing/runner.go @@ -67,6 +67,7 @@ var defaultProviderConfig = []provider.Config{ ProviderSpecificConfig: map[string]interface{}{ "lspServerName": "generic", provider.LspServerPathConfigKey: "/root/go/bin/gopls", + "lspServerArgs": []string{}, "dependencyProviderPath": "/usr/bin/golang-dependency-provider", }, }, @@ -81,6 +82,7 @@ var defaultProviderConfig = []provider.Config{ ProviderSpecificConfig: map[string]interface{}{ "lspServerName": "pylsp", provider.LspServerPathConfigKey: "/usr/local/bin/pylsp", + "lspServerArgs": []string{}, "workspaceFolders": []string{}, "dependencyFolders": []string{}, }, @@ -242,6 +244,7 @@ func runWorker(wg *sync.WaitGroup, inChan chan workerInput, outChan chan []Resul logFile.Close() continue } + // we already know in this group, all tcs have same params, use any analysisParams := tests[0].TestCases[0].AnalysisParams // write provider settings file volumes, err := ensureProviderSettings(tempDir, input.opts.RunLocal, input.testsFile, baseProviderConfig, analysisParams) @@ -429,24 +432,11 @@ func ensureProviderSettings(tempDirPath string, runLocal bool, testsFile TestsFi // depending on whether we run locally, or in a container, we will either use local paths or mounted paths switch { case runLocal: - // when running locally, we use the paths as-is for _, override := range testsFile.Providers { + // when running locally, we use the paths as-is dataPath := filepath.Join(filepath.Dir(testsFile.Path), filepath.Clean(override.DataPath)) - for idx := range baseProviders { - base := &baseProviders[idx] - if base.Name == override.Name { - initConf := &base.InitConfig[0] - base.ContextLines = 100 - initConf.AnalysisMode = params.Mode - switch base.Name { - case "python", "go", "nodejs": - initConf.ProviderSpecificConfig["workspaceFolders"] = []string{dataPath} - default: - initConf.Location = dataPath - } - final = append(final, *base) - } - } + final = append(final, + getMergedProviderConfig(override.Name, baseProviders, 
params, dataPath, tempDirPath)...) } default: // in containers, we need to make sure we only mount unique path trees @@ -478,22 +468,10 @@ func ensureProviderSettings(tempDirPath string, runLocal bool, testsFile TestsFi volumes[filepath.Join(filepath.Dir(testsFile.Path), uniquePath)] = path.Join("/data", uniquePath) } for _, override := range testsFile.Providers { - mountedDataPath := path.Join("/data", filepath.Clean(override.DataPath)) - for idx := range baseProviders { - base := &baseProviders[idx] - base.ContextLines = 100 - if base.Name == override.Name { - initConf := &base.InitConfig[0] - initConf.AnalysisMode = params.Mode - switch base.Name { - case "python", "go", "nodejs": - initConf.ProviderSpecificConfig["workspaceFolders"] = []string{mountedDataPath} - default: - initConf.Location = mountedDataPath - } - final = append(final, *base) - } - } + // when running in the container, we use the mounted path + dataPath := filepath.Join("/data", filepath.Clean(override.DataPath)) + final = append(final, + getMergedProviderConfig(override.Name, baseProviders, params, dataPath, "/shared")...) } } content, err := json.Marshal(final) @@ -507,6 +485,47 @@ func ensureProviderSettings(tempDirPath string, runLocal bool, testsFile TestsFi return volumes, nil } +// getMergedProviderConfig for a given provider in the tests file, find a base provider config and +// merge values as per precedance (values in tests file take precedance) +func getMergedProviderConfig(name string, baseConfig []provider.Config, params AnalysisParams, dataPath string, outputPath string) []provider.Config { + merged := []provider.Config{} + for idx := range baseConfig { + base := &baseConfig[idx] + base.ContextLines = 100 + if base.Name == name { + initConf := &base.InitConfig[0] + if params.Mode != "" { + initConf.AnalysisMode = params.Mode + } + switch base.Name { + // languages enabled via generic provide use workspaceFolders instead of location + // we also enable detailed logging for different providers + case "python": + initConf.ProviderSpecificConfig["workspaceFolders"] = []string{dataPath} + // log things in the output directory for debugging + lspArgs, ok := initConf.ProviderSpecificConfig["lspServerArgs"].([]string) + if ok { + initConf.ProviderSpecificConfig["lspServerArgs"] = append(lspArgs, + "--log-file", path.Join(outputPath, "python-server.log"), "-vv") + } + case "go": + initConf.ProviderSpecificConfig["workspaceFolders"] = []string{dataPath} + lspArgs, ok := initConf.ProviderSpecificConfig["lspServerArgs"].([]string) + if ok { + initConf.ProviderSpecificConfig["lspServerArgs"] = append(lspArgs, + "--logfile", path.Join(outputPath, "go-server.log"), "-vv") + } + case "nodejs": + initConf.ProviderSpecificConfig["workspaceFolders"] = []string{dataPath} + default: + initConf.Location = dataPath + } + merged = append(merged, *base) + } + } + return merged +} + func groupTestsByAnalysisParams(tests []Test) [][]Test { grouped := map[string]map[string]*Test{} for _, t := range tests { From 2207b0370de1fc3f80f48e4f61f9d46b530d95a8 Mon Sep 17 00:00:00 2001 From: Pranav Gaikwad Date: Thu, 14 Mar 2024 13:12:16 -0400 Subject: [PATCH 9/9] :ghost: use tab printer Signed-off-by: Pranav Gaikwad --- pkg/testing/result.go | 77 ++++++++++++++----------------------------- pkg/testing/runner.go | 6 +++- 2 files changed, 29 insertions(+), 54 deletions(-) diff --git a/pkg/testing/result.go b/pkg/testing/result.go index 8f0d57e..1273d04 100644 --- a/pkg/testing/result.go +++ b/pkg/testing/result.go @@ -3,9 +3,9 @@ package 
testing import ( "fmt" "io" - "math" "path/filepath" "strings" + "text/tabwriter" ) // Result is a result of a test run @@ -62,14 +62,6 @@ func PrintProgress(w io.WriteCloser, results []Result) { // results grouped by their tests files, then rules, then test cases resultsByTestsFile := map[string]map[string][]Result{} errorsByTestsFile := map[string][]string{} - justifyLen := 0 - maxInt := func(a ...int) int { - maxInt := math.MinInt64 - for _, n := range a { - maxInt = int(math.Max(float64(n), float64(maxInt))) - } - return maxInt - } for _, result := range results { if result.Error != nil { if _, ok := errorsByTestsFile[result.TestsFilePath]; !ok { @@ -79,9 +71,6 @@ func PrintProgress(w io.WriteCloser, results []Result) { result.Error.Error()) continue } - justifyLen = maxInt(justifyLen, - len(filepath.Base(result.TestsFilePath))+3, - len(result.RuleID)+4, len(result.TestCaseName)+6) if _, ok := resultsByTestsFile[result.TestsFilePath]; !ok { resultsByTestsFile[result.TestsFilePath] = map[string][]Result{} } @@ -95,59 +84,41 @@ func PrintProgress(w io.WriteCloser, results []Result) { } } } - report := []string{} + prettyWriter := tabwriter.NewWriter(w, 1, 1, 1, ' ', tabwriter.StripEscape) for testsFile, resultsByRule := range resultsByTestsFile { - totalTestsInFile := len(resultsByRule) - passedTestsInFile := 0 - testsFileReport := []string{} - testsFileSummary := fmt.Sprintf("- %s%s%%d/%%d PASSED", - filepath.Base(testsFile), strings.Repeat(" ", justifyLen-len(filepath.Base(testsFile))-2)) - testsReport := []string{} - for ruleID, resultsByTCs := range resultsByRule { - totalTestCasesInTest := len(resultsByTCs) - passedTestCasesInTest := 0 - testReport := []string{} - testSummary := fmt.Sprintf("%+2s %s%s%%d/%%d PASSED", - "-", ruleID, strings.Repeat(" ", justifyLen-len(ruleID)-3)) - testCaseReport := []string{} - for _, tcResult := range resultsByTCs { + testsResult := "" + passedRules := 0 + for test, testCases := range resultsByRule { + passedTCs := 0 + tcsResult := "" + for _, tcResult := range testCases { + // only output failed test cases if !tcResult.Passed { - reasons := []string{} + tcsResult = fmt.Sprintf("%s %s\tFAILED\n", tcsResult, tcResult.TestCaseName) for _, reason := range tcResult.FailureReasons { - reasons = append(reasons, fmt.Sprintf("%+6s %s", "-", reason)) + tcsResult = fmt.Sprintf("%s - %s\t\n", tcsResult, reason) } for _, debugInfo := range tcResult.DebugInfo { - reasons = append(reasons, fmt.Sprintf("%+6s %s", "-", debugInfo)) + tcsResult = fmt.Sprintf("%s - %s\t\n", tcsResult, debugInfo) } - testCaseReport = append(testCaseReport, - fmt.Sprintf("%+4s %s%sFAILED", "-", - tcResult.TestCaseName, strings.Repeat(" ", justifyLen-len(tcResult.TestCaseName)-5))) - testCaseReport = append(testCaseReport, reasons...) } else { - passedTestCasesInTest += 1 + passedTCs += 1 } } - if passedTestCasesInTest == totalTestCasesInTest { - passedTestsInFile += 1 + if passedTCs == len(testCases) { + passedRules += 1 + } + testStat := fmt.Sprintf("%d/%d PASSED", passedTCs, len(testCases)) + testsResult = fmt.Sprintf("%s %s\t%s\n", testsResult, test, testStat) + if tcsResult != "" { + testsResult = fmt.Sprintf("%s%s", testsResult, tcsResult) } - testReport = append(testReport, - fmt.Sprintf(testSummary, passedTestCasesInTest, totalTestCasesInTest)) - testReport = append(testReport, testCaseReport...) - testsReport = append(testsReport, testReport...) 
- } - testsFileReport = append(testsFileReport, - fmt.Sprintf(testsFileSummary, passedTestsInFile, totalTestsInFile)) - testsFileReport = append(testsFileReport, testsReport...) - report = append(report, testsFileReport...) - } - for testsFile, errs := range errorsByTestsFile { - errorReport := []string{fmt.Sprintf("- %s FAILED", filepath.Base(testsFile))} - for _, e := range errs { - errorReport = append(errorReport, fmt.Sprintf("%+2s %s", "-", e)) } - report = append(report, errorReport...) + testsFileStat := fmt.Sprintf("%d/%d PASSED", passedRules, len(resultsByRule)) + fmt.Fprintf(prettyWriter, + "%s\t%s\n%s", filepath.Base(testsFile), testsFileStat, testsResult) } - fmt.Fprintln(w, strings.Join(report, "\n")) + prettyWriter.Flush() } func AnyFailed(results []Result) bool { diff --git a/pkg/testing/runner.go b/pkg/testing/runner.go index 534a2be..23a859c 100644 --- a/pkg/testing/runner.go +++ b/pkg/testing/runner.go @@ -304,7 +304,11 @@ func runWorker(wg *sync.WaitGroup, inChan chan workerInput, outChan chan []Resul RuleID: test.RuleID, TestCaseName: tc.Name, } - result.FailureReasons = tc.Verify(outputRulesets[0]) + if len(outputRulesets) > 0 { + result.FailureReasons = tc.Verify(outputRulesets[0]) + } else { + result.FailureReasons = []string{"empty output"} + } if len(result.FailureReasons) == 0 { result.Passed = true } else {