diff --git a/go.mod b/go.mod index 777d9aaac..ed17589f8 100644 --- a/go.mod +++ b/go.mod @@ -4,13 +4,16 @@ go 1.21 require ( github.com/confluentinc/confluent-kafka-go/v2 v2.3.0 + github.com/dominikbraun/graph v0.23.0 github.com/fxamacker/cbor/v2 v2.5.0 github.com/go-json-experiment/json v0.0.0-20231102232822-2e55bd4e08b0 + github.com/google/go-cmp v0.6.0 github.com/google/uuid v1.3.1 github.com/grpc-ecosystem/go-grpc-middleware/providers/prometheus v1.0.0 github.com/hashicorp/consul/sdk v0.16.0 github.com/hashicorp/go-hclog v1.5.0 github.com/hashicorp/go-plugin v1.6.0 + github.com/invopop/jsonschema v0.12.0 github.com/jmoiron/sqlx v1.3.5 github.com/jonboulle/clockwork v0.4.0 github.com/jpillora/backoff v1.0.0 @@ -35,6 +38,7 @@ require ( golang.org/x/mod v0.14.0 google.golang.org/grpc v1.58.3 google.golang.org/protobuf v1.31.0 + sigs.k8s.io/yaml v1.4.0 ) require ( @@ -49,12 +53,10 @@ require ( github.com/go-logr/logr v1.2.4 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/golang/protobuf v1.5.3 // indirect - github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb // indirect - github.com/google/go-cmp v0.6.0 github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb // indirect github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.0.0-rc.3 // indirect github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0 // indirect github.com/hashicorp/yamux v0.1.1 // indirect - github.com/invopop/jsonschema v0.12.0 github.com/mailru/easyjson v0.7.7 // indirect github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-isatty v0.0.17 // indirect diff --git a/go.sum b/go.sum index 15edbd099..4521c5865 100644 --- a/go.sum +++ b/go.sum @@ -83,6 +83,8 @@ github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKoh github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec= github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= github.com/docker/go-units 
v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/dominikbraun/graph v0.23.0 h1:TdZB4pPqCLFxYhdyMFb1TBdFxp8XLcJfTTBQucVPgCo= +github.com/dominikbraun/graph v0.23.0/go.mod h1:yOjYyogZLY1LSG9E33JWZJiq5k83Qy2C6POAuiViluc= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= @@ -156,6 +158,7 @@ github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= @@ -648,3 +651,5 @@ honnef.co/go/tools v0.1.3/go.mod h1:NgwopIslSNH47DimFoV78dnkksY2EFtX0ajyb3K/las= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= +sigs.k8s.io/yaml v1.4.0 h1:Mk1wCc2gy/F0THH0TAp1QYyJNzRm2KCLy3o5ASXVI5E= +sigs.k8s.io/yaml v1.4.0/go.mod h1:Ejl7/uTz7PSA4eKMyQCUTnhZYNmLIl+5c2lQPGR2BPY= diff --git a/pkg/workflows/dependency_graph.go b/pkg/workflows/dependency_graph.go new file mode 100644 index 000000000..843480670 --- /dev/null +++ b/pkg/workflows/dependency_graph.go @@ -0,0 +1,248 @@ +package 
workflows + +import ( + "errors" + "fmt" + "regexp" + "strings" + + "github.com/dominikbraun/graph" + + "github.com/smartcontractkit/chainlink-common/pkg/capabilities" +) + +const ( + KeywordTrigger = "trigger" +) + +// StepDefinition is the parsed representation of a step in a workflow. +// +// Within the workflow spec, they are called "Capability Properties". +type StepDefinition struct { + ID string `json:"id" jsonschema:"required"` + Ref string `json:"ref,omitempty" jsonschema:"pattern=^[a-z0-9_]+$"` + Inputs map[string]any `json:"inputs,omitempty"` + Config map[string]any `json:"config" jsonschema:"required"` + + CapabilityType capabilities.CapabilityType `json:"-"` +} + +// WorkflowSpec is the parsed representation of a workflow. +type WorkflowSpec struct { + Triggers []StepDefinition `json:"triggers" jsonschema:"required"` + Actions []StepDefinition `json:"actions,omitempty"` + Consensus []StepDefinition `json:"consensus" jsonschema:"required"` + Targets []StepDefinition `json:"targets" jsonschema:"required"` +} + +func (w *WorkflowSpec) Steps() []StepDefinition { + s := []StepDefinition{} + s = append(s, w.Actions...) + s = append(s, w.Consensus...) + s = append(s, w.Targets...) + return s +} + +type Vertex struct { + StepDefinition + Dependencies []string +} + +// DependencyGraph is an intermediate representation of a workflow wherein all the graph +// vertices are represented and validated. It is a static representation of the workflow dependencies. +type DependencyGraph struct { + ID string + graph.Graph[string, *Vertex] + + Triggers []*StepDefinition + + Spec *WorkflowSpec +} + +// VID is an identifier for a Vertex that can be used to uniquely identify it in a graph. +// it represents the notion `hash` in the graph package AddVertex method. +// we refrain from naming it `hash` to avoid confusion with the hash function. 
+func (v *Vertex) VID() string { + return v.Ref +} + +func ParseDependencyGraph(yamlWorkflow string) (*DependencyGraph, error) { + spec, err := ParseWorkflowSpecYaml(yamlWorkflow) + if err != nil { + return nil, err + } + + // Construct and validate the graph. We instantiate an + // empty graph with just one starting entry: `trigger`. + // This provides the starting point for our graph and + // points to all dependent steps. + // Note: all triggers are represented by a single step called + // `trigger`. This is because for workflows with multiple triggers + // only one trigger will have started the workflow. + stepHash := func(s *Vertex) string { + return s.VID() + } + g := graph.New( + stepHash, + graph.PreventCycles(), + graph.Directed(), + ) + err = g.AddVertex(&Vertex{ + StepDefinition: StepDefinition{Ref: KeywordTrigger}, + }) + if err != nil { + return nil, err + } + + // Next, let's populate the other entries in the graph. + for _, s := range spec.Steps() { + // TODO: The workflow format spec doesn't always require a `Ref` + // to be provided (triggers and targets don't have a `Ref` for example). + // To handle this, we default the `Ref` to the type, but ideally we + // should find a better long-term way to handle this. + if s.Ref == "" { + s.Ref = s.ID + } + + innerErr := g.AddVertex(&Vertex{StepDefinition: s}) + if innerErr != nil { + return nil, fmt.Errorf("cannot add vertex %s: %w", s.Ref, innerErr) + } + } + + stepRefs, err := g.AdjacencyMap() + if err != nil { + return nil, err + } + + // Next, let's iterate over the steps and populate + // any edges. 
+ for stepRef := range stepRefs { + step, innerErr := g.Vertex(stepRef) + if innerErr != nil { + return nil, innerErr + } + + refs, innerErr := findRefs(step.Inputs) + if innerErr != nil { + return nil, innerErr + } + step.Dependencies = refs + + if stepRef != KeywordTrigger && len(refs) == 0 { + return nil, errors.New("all non-trigger steps must have a dependent ref") + } + + for _, r := range refs { + innerErr = g.AddEdge(r, step.Ref) + if innerErr != nil { + return nil, innerErr + } + } + } + + triggerSteps := []*StepDefinition{} + for _, t := range spec.Triggers { + tt := t + triggerSteps = append(triggerSteps, &tt) + } + wf := &DependencyGraph{ + Spec: &spec, + Graph: g, + Triggers: triggerSteps, + } + return wf, err +} + +var ( + InterpolationTokenRe = regexp.MustCompile(`^\$\((\S+)\)$`) +) + +// findRefs takes an `inputs` map and returns a list of all the step references +// contained within it. +func findRefs(inputs map[string]any) ([]string, error) { + refs := []string{} + _, err := DeepMap( + inputs, + // This function is called for each string in the map + // for each string, we iterate over each match of the interpolation token + // - if there are no matches, return no reference + // - if there is one match, return the reference + // - if there are multiple matches (in the case of a multi-part state reference), return just the step ref + func(el string) (any, error) { + matches := InterpolationTokenRe.FindStringSubmatch(el) + if len(matches) < 2 { + return el, nil + } + + m := matches[1] + parts := strings.Split(m, ".") + if len(parts) < 1 { + return nil, fmt.Errorf("invalid ref %s", m) + } + + refs = append(refs, parts[0]) + return el, nil + }, + ) + return refs, err +} + +// DeepMap recursively applies a transformation function +// over each string within: +// +// - a map[string]any +// - a []any +// - a string +func DeepMap(input any, transform func(el string) (any, error)) (any, error) { + // in the case of a string, simply apply the transformation 
+ // in the case of a map, recurse and apply the transformation to each value + // in the case of a list, recurse and apply the transformation to each element + switch tv := input.(type) { + case string: + nv, err := transform(tv) + if err != nil { + return nil, err + } + + return nv, nil + case mapping: + // coerce mapping to map[string]any + mp := map[string]any(tv) + + nm := map[string]any{} + for k, v := range mp { + nv, err := DeepMap(v, transform) + if err != nil { + return nil, err + } + + nm[k] = nv + } + return nm, nil + case map[string]any: + nm := map[string]any{} + for k, v := range tv { + nv, err := DeepMap(v, transform) + if err != nil { + return nil, err + } + + nm[k] = nv + } + return nm, nil + case []any: + a := []any{} + for _, el := range tv { + ne, err := DeepMap(el, transform) + if err != nil { + return nil, err + } + + a = append(a, ne) + } + return a, nil + } + + return nil, fmt.Errorf("cannot traverse item %+v of type %T", input, input) +} diff --git a/pkg/workflows/dependency_graph_test.go b/pkg/workflows/dependency_graph_test.go new file mode 100644 index 000000000..6d49db835 --- /dev/null +++ b/pkg/workflows/dependency_graph_test.go @@ -0,0 +1,245 @@ +package workflows_test + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/smartcontractkit/chainlink-common/pkg/workflows" +) + +func TestParseDependencyGraph(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + yaml string + graph map[string]map[string]struct{} + errMsg string + }{ + { + name: "basic example", + yaml: ` +triggers: + - id: "a-trigger" + +actions: + - id: "an-action" + ref: "an-action" + inputs: + trigger_output: $(trigger.outputs) + +consensus: + - id: "a-consensus" + ref: "a-consensus" + inputs: + trigger_output: $(trigger.outputs) + an-action_output: $(an-action.outputs) + +targets: + - id: "a-target" + ref: "a-target" + inputs: + consensus_output: $(a-consensus.outputs) +`, + graph: 
map[string]map[string]struct{}{ + workflows.KeywordTrigger: { + "an-action": struct{}{}, + "a-consensus": struct{}{}, + }, + "an-action": { + "a-consensus": struct{}{}, + }, + "a-consensus": { + "a-target": struct{}{}, + }, + "a-target": {}, + }, + }, + { + name: "circular relationship", + yaml: ` +triggers: + - id: "a-trigger" + +actions: + - id: "an-action" + ref: "an-action" + inputs: + trigger_output: $(trigger.outputs) + output: $(a-second-action.outputs) + - id: "a-second-action" + ref: "a-second-action" + inputs: + output: $(an-action.outputs) + +consensus: + - id: "a-consensus" + ref: "a-consensus" + inputs: + trigger_output: $(trigger.outputs) + an-action_output: $(an-action.outputs) + +targets: + - id: "a-target" + ref: "a-target" + inputs: + consensus_output: $(a-consensus.outputs) +`, + errMsg: "edge would create a cycle", + }, + { + name: "indirect circular relationship", + yaml: ` +triggers: + - id: "a-trigger" + +actions: + - id: "an-action" + ref: "an-action" + inputs: + trigger_output: $(trigger.outputs) + action_output: $(a-third-action.outputs) + - id: "a-second-action" + ref: "a-second-action" + inputs: + output: $(an-action.outputs) + - id: "a-third-action" + ref: "a-third-action" + inputs: + output: $(a-second-action.outputs) + +consensus: + - id: "a-consensus" + ref: "a-consensus" + inputs: + trigger_output: $(trigger.outputs) + an-action_output: $(an-action.outputs) + +targets: + - id: "a-target" + ref: "a-target" + inputs: + consensus_output: $(a-consensus.outputs) +`, + errMsg: "edge would create a cycle", + }, + { + name: "relationship doesn't exist", + yaml: ` +triggers: + - id: "a-trigger" + +actions: + - id: "an-action" + ref: "an-action" + inputs: + trigger_output: $(trigger.outputs) + action_output: $(missing-action.outputs) + +consensus: + - id: "a-consensus" + ref: "a-consensus" + inputs: + an-action_output: $(an-action.outputs) + +targets: + - id: "a-target" + ref: "a-target" + inputs: + consensus_output: $(a-consensus.outputs) 
+`, + errMsg: "source vertex missing-action: vertex not found", + }, + { + name: "two trigger nodes", + yaml: ` +triggers: + - id: "a-trigger" + - id: "a-second-trigger" + +actions: + - id: "an-action" + ref: "an-action" + inputs: + trigger_output: $(trigger.outputs) + +consensus: + - id: "a-consensus" + ref: "a-consensus" + inputs: + an-action_output: $(an-action.outputs) + +targets: + - id: "a-target" + ref: "a-target" + inputs: + consensus_output: $(a-consensus.outputs) +`, + graph: map[string]map[string]struct{}{ + workflows.KeywordTrigger: { + "an-action": struct{}{}, + }, + "an-action": { + "a-consensus": struct{}{}, + }, + "a-consensus": { + "a-target": struct{}{}, + }, + "a-target": {}, + }, + }, + { + name: "non-trigger step with no dependent refs", + yaml: ` +triggers: + - id: "a-trigger" + - id: "a-second-trigger" +actions: + - id: "an-action" + ref: "an-action" + inputs: + hello: "world" +consensus: + - id: "a-consensus" + ref: "a-consensus" + inputs: + trigger_output: $(trigger.outputs) + action_output: $(an-action.outputs) +targets: + - id: "a-target" + ref: "a-target" + inputs: + consensus_output: $(a-consensus.outputs) +`, + errMsg: "all non-trigger steps must have a dependent ref", + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(st *testing.T) { + //wf, err := workflows.Parse(tc.yaml) + wf, err := workflows.ParseDependencyGraph(tc.yaml) + if tc.errMsg != "" { + assert.ErrorContains(st, err, tc.errMsg) + } else { + require.NoError(st, err) + + adjacencies, err := wf.AdjacencyMap() + require.NoError(t, err) + + got := map[string]map[string]struct{}{} + for k, v := range adjacencies { + if _, ok := got[k]; !ok { + got[k] = map[string]struct{}{} + } + for adj := range v { + got[k][adj] = struct{}{} + } + } + + assert.Equal(st, tc.graph, got, adjacencies) + } + }) + } +} diff --git a/pkg/workflows/models_yaml.go b/pkg/workflows/models_yaml.go new file mode 100644 index 000000000..877d8835c --- /dev/null +++ 
b/pkg/workflows/models_yaml.go @@ -0,0 +1,342 @@ +package workflows + +import ( + "bytes" + "encoding/json" + "fmt" + "slices" + "strings" + + "github.com/invopop/jsonschema" + "github.com/shopspring/decimal" + "sigs.k8s.io/yaml" + + "github.com/smartcontractkit/chainlink-common/pkg/capabilities" +) + +func GenerateJSONSchema() ([]byte, error) { + schema := jsonschema.Reflect(&workflowSpecYaml{}) + + return json.MarshalIndent(schema, "", " ") +} + +func ParseWorkflowSpecYaml(data string) (WorkflowSpec, error) { + w := workflowSpecYaml{} + err := yaml.Unmarshal([]byte(data), &w) + + return w.toWorkflowSpec(), err +} + +// workflowSpecYaml is the YAML representation of a workflow spec. +// +// It allows for multiple ways of defining a workflow spec, which we later +// convert to a single representation, `workflowSpec`. +type workflowSpecYaml struct { + // Triggers define a starting condition for the workflow, based on specific events or conditions. + Triggers []stepDefinitionYaml `json:"triggers" jsonschema:"required"` + // Actions represent a discrete operation within the workflow, potentially transforming input data. + Actions []stepDefinitionYaml `json:"actions,omitempty"` + // Consensus encapsulates the logic for aggregating and validating the results from various nodes. + Consensus []stepDefinitionYaml `json:"consensus" jsonschema:"required"` + // Targets represents the final step of the workflow, delivering the processed data to a specified location. + Targets []stepDefinitionYaml `json:"targets" jsonschema:"required"` +} + +// toWorkflowSpec converts a workflowSpecYaml to a workflowSpec. +// +// We support multiple ways of defining a workflow spec yaml, +// but internally we want to work with a single representation. 
+func (w workflowSpecYaml) toWorkflowSpec() WorkflowSpec { + triggers := make([]StepDefinition, 0, len(w.Triggers)) + for _, t := range w.Triggers { + sd := t.toStepDefinition() + sd.CapabilityType = capabilities.CapabilityTypeTrigger + triggers = append(triggers, sd) + } + + actions := make([]StepDefinition, 0, len(w.Actions)) + for _, a := range w.Actions { + sd := a.toStepDefinition() + sd.CapabilityType = capabilities.CapabilityTypeAction + actions = append(actions, sd) + } + + consensus := make([]StepDefinition, 0, len(w.Consensus)) + for _, c := range w.Consensus { + sd := c.toStepDefinition() + sd.CapabilityType = capabilities.CapabilityTypeConsensus + consensus = append(consensus, sd) + } + + targets := make([]StepDefinition, 0, len(w.Targets)) + for _, t := range w.Targets { + sd := t.toStepDefinition() + sd.CapabilityType = capabilities.CapabilityTypeTarget + targets = append(targets, sd) + } + + return WorkflowSpec{ + Triggers: triggers, + Actions: actions, + Consensus: consensus, + Targets: targets, + } +} + +type mapping map[string]any + +func (m *mapping) UnmarshalJSON(b []byte) error { + mp := map[string]any{} + + d := json.NewDecoder(bytes.NewReader(b)) + d.UseNumber() + + err := d.Decode(&mp) + if err != nil { + return err + } + + nm, err := convertNumbers(mp) + if err != nil { + return err + } + + *m = (mapping)(nm) + return err +} + +func convertNumber(el any) (any, error) { + switch elv := el.(type) { + case json.Number: + if strings.Contains(elv.String(), ".") { + f, err := elv.Float64() + if err == nil { + return decimal.NewFromFloat(f), nil + } + } + + return elv.Int64() + default: + return el, nil + } +} + +func convertNumbers(m map[string]any) (map[string]any, error) { + nm := map[string]any{} + for k, v := range m { + switch tv := v.(type) { + case map[string]any: + cm, err := convertNumbers(tv) + if err != nil { + return nil, err + } + + nm[k] = cm + case []any: + na := make([]any, len(tv)) + for i, v := range tv { + cv, err := 
convertNumber(v) + if err != nil { + return nil, err + } + + na[i] = cv + } + + nm[k] = na + default: + cv, err := convertNumber(v) + if err != nil { + return nil, err + } + + nm[k] = cv + } + } + + return nm, nil +} + +func (m mapping) MarshalJSON() ([]byte, error) { + return json.Marshal(map[string]any(m)) +} + +// stepDefinitionYaml is the YAML representation of a step in a workflow. +// +// It allows for multiple ways of defining a step, which we later +// convert to a single representation, `stepDefinition`. +type stepDefinitionYaml struct { + // A universally unique name for a capability will be defined under the “id” property. The uniqueness will, eventually, be enforced in the Capability Registry. + // + // Semver must be used to specify the version of the Capability at the end of the id field. Capability versions must be immutable. + // + // Initially, we will require major versions. This will ease upgrades early on while we develop the infrastructure. + // + // Eventually, we might support minor version and specific version pins. This will allow workflow authors to have flexibility when selecting the version, and node operators will be able to determine when they should update their capabilities. + // + // There are two ways to specify an id - using a string as a fully qualified ID or a structured table. When using a table, labels are ordered alphanumerically and joined into a string following a + // {name}:{label1_key}_{label1_value}:{label2_key}_{label2_value}@{version} + // pattern. + // + // The “id” supports [a-z0-9_-:] characters followed by an @ and [semver regex] at the end. + // + // Validation must throw an error if: + // + // Unsupported characters are used. + // (For Keystone only.) More specific than a major version is specified. 
+ // + // Example (string) + // id: read_chain:chain_ethereum:network_mainnet@1 + // + // Example (table) + // + // id: + // name: read_chain + // version: 1 + // labels: + // chain: ethereum + // network: mainnet + // + // [semver regex]: https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string + ID stepDefinitionID `json:"id" jsonschema:"required"` + + // Actions and Consensus capabilities have a required “ref” property that must be unique within a Workflow file (not universally) This property enables referencing outputs and is required because Actions and Consensus always need to be referenced in the following phases. Triggers can optionally specify if they need to be referenced. + // + // The “ref” supports [a-z0-9_] characters. + // + // Validation must throw an error if: + // - Unsupported characters are used. + // - The same “ref” appears in the workflow multiple times. + // - “ref” is used on a Target capability. + // - “ref” has a circular reference. + // + // NOTE: Should introduce a custom validator to cover trigger case + Ref string `json:"ref,omitempty" jsonschema:"pattern=^[a-z0-9_-]+$"` + + // Capabilities can specify an additional optional ”inputs” property. It allows specifying a dependency on the result of one or more other capabilities. These are always runtime values that cannot be provided upfront. It takes a map of the argument name internal to the capability and an explicit reference to the values. + // + // References are specified using the [id].[ref].[path_to_value] pattern. + // + // The interpolation of “inputs” is allowed + // + // Validation must throw an error if: + // - Input reference cannot be resolved. + // - Input is defined on triggers + // NOTE: Should introduce a custom validator to cover trigger case + Inputs mapping `json:"inputs,omitempty"` + + // The configuration of a Capability will be done using the “config” property. 
Each capability is responsible for defining an external interface used during setup. This interface may be unique or identical, meaning multiple Capabilities might use the same configuration properties. + // + // The interpolation of “inputs” + // + // Interpolation of self inputs is allowed from within the “config” property. + // + // Example + // targets: + // - id: write_polygon_mainnet@1 + // inputs: + // report: + // - consensus.evm_median.outputs.report + // config: + // address: "0xaabbcc" + // method: "updateFeedValues(report bytes, role uint8)" + // params: [$(inputs.report), 1] + Config mapping `json:"config" jsonschema:"required"` +} + +// toStepDefinition converts a stepDefinitionYaml to a stepDefinition. +// +// `stepDefinition` is the converged representation of a step in a workflow. +func (s stepDefinitionYaml) toStepDefinition() StepDefinition { + return StepDefinition{ + Ref: s.Ref, + ID: s.ID.String(), + Inputs: s.Inputs, + Config: s.Config, + } +} + +// stepDefinitionID represents both the string and table representations of the "id" field in a stepDefinition. +type stepDefinitionID struct { + idStr string + idTable *stepDefinitionTableID +} + +func (s stepDefinitionID) String() string { + if s.idStr != "" { + return s.idStr + } + + return s.idTable.String() +} + +func (s *stepDefinitionID) UnmarshalJSON(data []byte) error { + // Unmarshal the JSON data into a map to determine if it's a string or a table + var m string + err := json.Unmarshal(data, &m) + if err == nil { + s.idStr = m + return nil + } + + // If the JSON data is a table, unmarshal it into a stepDefinitionTableID + var table stepDefinitionTableID + err = json.Unmarshal(data, &table) + if err != nil { + return err + } + s.idTable = &table + return nil +} + +func (s *stepDefinitionID) MarshalJSON() ([]byte, error) { + if s.idStr != "" { + return json.Marshal(s.idStr) + } + + return json.Marshal(s.idTable) +} + +// JSONSchema returns the JSON schema for a stepDefinitionID. 
+// +// The schema is a oneOf schema that allows either a string or a table. +func (stepDefinitionID) JSONSchema() *jsonschema.Schema { + reflector := jsonschema.Reflector{DoNotReference: true, ExpandedStruct: true} + tableSchema := reflector.Reflect(&stepDefinitionTableID{}) + stringSchema := &jsonschema.Schema{ + ID: "string", + Pattern: "^[a-z0-9_\\-:]+@(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?$", + } + + return &jsonschema.Schema{ + Title: "id", + OneOf: []*jsonschema.Schema{ + stringSchema, + tableSchema, + }, + } +} + +// stepDefinitionTableID is the structured representation of a stepDefinitionID. +type stepDefinitionTableID struct { + Name string `json:"name"` + Version string `json:"version" jsonschema:"pattern=(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?$"` + Labels map[string]string `json:"labels"` +} + +// String returns the string representation of a stepDefinitionTableID. +// +// It follows the format: +// +// {name}:{label1_key}_{label1_value}:{label2_key}_{label2_value}@{version} +// +// where labels are ordered alphanumerically. 
+func (s stepDefinitionTableID) String() string { + labels := make([]string, 0, len(s.Labels)) + for k, v := range s.Labels { + labels = append(labels, fmt.Sprintf("%s_%s", k, v)) + } + slices.Sort(labels) + + return fmt.Sprintf("%s:%s@%s", s.Name, strings.Join(labels, ":"), s.Version) +} diff --git a/pkg/workflows/models_yaml_test.go b/pkg/workflows/models_yaml_test.go new file mode 100644 index 000000000..e7e638070 --- /dev/null +++ b/pkg/workflows/models_yaml_test.go @@ -0,0 +1,249 @@ +package workflows + +import ( + "encoding/json" + "fmt" + "os" + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/santhosh-tekuri/jsonschema/v5" + "github.com/shopspring/decimal" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "sigs.k8s.io/yaml" +) + +var fixtureDir = "./testdata/fixtures/workflows/" + +// yamlFixtureReaderObj reads a yaml fixture file and returns the parsed object +func yamlFixtureReaderObj(t *testing.T, testCase string) func(name string) any { + testFixtureReader := yamlFixtureReaderBytes(t, testCase) + + return func(name string) any { + testFileBytes := testFixtureReader(name) + + var testFileYaml any + err := yaml.Unmarshal(testFileBytes, &testFileYaml) + require.NoError(t, err) + + return testFileYaml + } +} + +// yamlFixtureReaderBytes reads a yaml fixture file and returns the bytes +func yamlFixtureReaderBytes(t *testing.T, testCase string) func(name string) []byte { + return func(name string) []byte { + testFileBytes, err := os.ReadFile(fmt.Sprintf(fixtureDir+"%s/%s.yaml", testCase, name)) + require.NoError(t, err) + + return testFileBytes + } +} + +var transformJSON = cmp.FilterValues(func(x, y []byte) bool { + return json.Valid(x) && json.Valid(y) +}, cmp.Transformer("ParseJSON", func(in []byte) (out interface{}) { + if err := json.Unmarshal(in, &out); err != nil { + panic(err) // should never occur given previous filter to ensure valid JSON + } + return out +})) + +func TestWorkflowSpecMarshalling(t 
*testing.T) { + t.Parallel() + fixtureReader := yamlFixtureReaderBytes(t, "marshalling") + + t.Run("Type coercion", func(t *testing.T) { + workflowBytes := fixtureReader("workflow_1") + + spec := workflowSpecYaml{} + err := yaml.Unmarshal(workflowBytes, &spec) + require.NoError(t, err) + + // Test that our workflowSpec still keeps all of the original data + var rawSpec interface{} + err = yaml.Unmarshal(workflowBytes, &rawSpec) + require.NoError(t, err) + + workflowspecJSON, err := json.MarshalIndent(spec, "", " ") + require.NoError(t, err) + rawworkflowspecJSON, err := json.MarshalIndent(rawSpec, "", " ") + require.NoError(t, err) + + if diff := cmp.Diff(rawworkflowspecJSON, workflowspecJSON, transformJSON); diff != "" { + t.Errorf("ParseWorkflowWorkflowSpecFromString() mismatch (-want +got):\n%s", diff) + t.FailNow() + } + + // Spot check some fields + consensusConfig := spec.Consensus[0].Config + v, ok := consensusConfig["aggregation_config"] + require.True(t, ok, "expected aggregation_config to be present in consensus config") + + // the type of the keys present in v should be string rather than a number + // this is because JSON keys are always strings + _, ok = v.(map[string]any) + require.True(t, ok, "expected map[string]interface{} but got %T", v) + + // Make sure we dont have any weird type coercion with possible boolean values + booleanCoercions, ok := spec.Triggers[0].Config["boolean_coercion"].(map[string]any) + require.True(t, ok, "expected boolean_coercion to be present in triggers config") + + // check bools + bools, ok := booleanCoercions["bools"] + require.True(t, ok, "expected bools to be present in boolean_coercions") + for _, v := range bools.([]interface{}) { + _, ok = v.(bool) + require.True(t, ok, "expected bool but got %T", v) + } + + // check strings + strings, ok := booleanCoercions["strings"] + require.True(t, ok, "expected strings to be present in boolean_coercions") + for _, v := range strings.([]interface{}) { + _, ok = v.(string) + 
require.True(t, ok, "expected string but got %T", v) + } + + // check numbers + numbers, ok := booleanCoercions["numbers"] + require.True(t, ok, "expected numbers to be present in boolean_coercions") + for _, v := range numbers.([]interface{}) { + _, ok = v.(int64) + require.True(t, ok, "expected int64 but got %T", v) + } + }) + + t.Run("Table and string capability id", func(t *testing.T) { + workflowBytes := fixtureReader("workflow_2") + + spec := workflowSpecYaml{} + err := yaml.Unmarshal(workflowBytes, &spec) + require.NoError(t, err) + + // Test that our workflowSpec still keeps all of the original data + var rawSpec interface{} + err = yaml.Unmarshal(workflowBytes, &rawSpec) + require.NoError(t, err) + + workflowspecJSON, err := json.MarshalIndent(spec, "", " ") + require.NoError(t, err) + rawworkflowspecJSON, err := json.MarshalIndent(rawSpec, "", " ") + require.NoError(t, err) + + if diff := cmp.Diff(rawworkflowspecJSON, workflowspecJSON, transformJSON); diff != "" { + t.Errorf("ParseWorkflowWorkflowSpecFromString() mismatch (-want +got):\n%s", diff) + t.FailNow() + } + }) + + t.Run("Yaml spec to spec", func(t *testing.T) { + expectedSpecPath := fixtureDir + "marshalling/" + "workflow_2_spec.json" + workflowBytes := fixtureReader("workflow_2") + + workflowYaml := &workflowSpecYaml{} + err := yaml.Unmarshal(workflowBytes, workflowYaml) + require.NoError(t, err) + + workflowSpec := workflowYaml.toWorkflowSpec() + workflowSpecBytes, err := json.MarshalIndent(workflowSpec, "", " ") + require.NoError(t, err) + + // change this to update golden file + shouldUpdateWorkflowSpec := false + if shouldUpdateWorkflowSpec { + err = os.WriteFile(expectedSpecPath, workflowSpecBytes, 0600) + require.NoError(t, err) + } + + expectedSpecBytes, err := os.ReadFile(expectedSpecPath) + require.NoError(t, err) + diff := cmp.Diff(expectedSpecBytes, workflowSpecBytes, transformJSON) + if diff != "" { + t.Errorf("WorkflowYamlSpecToWorkflowSpec() mismatch (-want +got):\n%s", diff) + 
t.FailNow() + } + }) +} + +func TestJsonSchema(t *testing.T) { + t.Parallel() + t.Run("GenerateJsonSchema", func(t *testing.T) { + expectedSchemaPath := fixtureDir + "workflow_schema.json" + generatedSchema, err := GenerateJSONSchema() + require.NoError(t, err) + + // change this to update golden file + shouldUpdateSchema := false + if shouldUpdateSchema { + err = os.WriteFile(expectedSchemaPath, generatedSchema, 0600) + require.NoError(t, err) + } + + expectedSchema, err := os.ReadFile(expectedSchemaPath) + require.NoError(t, err) + diff := cmp.Diff(expectedSchema, generatedSchema, transformJSON) + if diff != "" { + t.Errorf("GenerateJsonSchema() mismatch (-want +got):\n%s", diff) + t.FailNow() + } + }) + + t.Run("ValidateJsonSchema", func(t *testing.T) { + generatedSchema, err := GenerateJSONSchema() + require.NoError(t, err) + + // test version regex + // for keystone, we should support major versions only along with prereleases and build metadata + t.Run("version", func(t *testing.T) { + readVersionFixture := yamlFixtureReaderObj(t, "versioning") + failingFixture1 := readVersionFixture("failing_1") + failingFixture2 := readVersionFixture("failing_2") + passingFixture1 := readVersionFixture("passing_1") + jsonSchema, err := jsonschema.CompileString("github.com/smartcontractkit/chainlink", string(generatedSchema)) + require.NoError(t, err) + + err = jsonSchema.Validate(failingFixture1) + require.Error(t, err) + + err = jsonSchema.Validate(failingFixture2) + require.Error(t, err) + + err = jsonSchema.Validate(passingFixture1) + require.NoError(t, err) + }) + + // test ref regex + t.Run("ref", func(t *testing.T) { + readRefFixture := yamlFixtureReaderObj(t, "references") + failingFixture1 := readRefFixture("failing_1") + passingFixture1 := readRefFixture("passing_1") + jsonSchema, err := jsonschema.CompileString("github.com/smartcontractkit/chainlink", string(generatedSchema)) + require.NoError(t, err) + + err = jsonSchema.Validate(failingFixture1) + 
require.Error(t, err) + + err = jsonSchema.Validate(passingFixture1) + require.NoError(t, err) + }) + }) +} + +func TestMappingCustomType(t *testing.T) { + m := mapping(map[string]any{}) + data := ` +{ + "foo": 100, + "bar": 100.00, + "baz": { "gnat": 11.10 } +}` + + err := m.UnmarshalJSON([]byte(data)) + require.NoError(t, err) + assert.Equal(t, int64(100), m["foo"], m) + assert.Equal(t, decimal.NewFromFloat(100.00), m["bar"], m) + assert.Equal(t, decimal.NewFromFloat(11.10), m["baz"].(map[string]any)["gnat"], m) +} diff --git a/pkg/workflows/testdata/fixtures/workflows/marshalling/workflow_1.yaml b/pkg/workflows/testdata/fixtures/workflows/marshalling/workflow_1.yaml new file mode 100644 index 000000000..9a9870af8 --- /dev/null +++ b/pkg/workflows/testdata/fixtures/workflows/marshalling/workflow_1.yaml @@ -0,0 +1,88 @@ + triggers: + - id: mercury-trigger@1 + ref: report_data + config: + boolean_coercion: + bools: + - y + - n + - yes + - no + - Y + - N + - YES + - NO + - No + - Yes + - TRUE + - FALSE + - True + - False + - true + - false + strings: + - TruE + - FalsE + - "true" + - "false" + - "TRUE" + - "FALSE" + - t + - f + - "T" + - "F" + - "t" + - "f" + - "1" + - "0" + - "yes" + - "no" + - "y" + - "n" + - "YES" + - "NO" + - "Y" + - "N" + numbers: + - 1 + - 0 + feed_ids: + - 123 # ETHUSD + - 456 # LINKUSD + - 789 # USDBTC + + # no actions + + consensus: + - id: offchain_reporting@1 + inputs: + observations: + - triggers.report_data.outputs + config: + aggregation_method: data_feeds_2_0 + aggregation_config: + 123: # ETHUSD + deviation: "0.005" + heartbeat: 24h + test: + 456: # LINKUSD + deviation: "0.001" + heartbeat: 24h + 789: # USDBTC + deviation: "0.002" + heartbeat: 6h + encoder: EVM + encoder_config: + abi: "mercury_reports bytes[]" + + targets: + - id: write_polygon_mainnet@1 + inputs: + report: + - consensus.evm_median.outputs.report + config: + address: "0xaabbcc" + method: "updateFeedValues(report bytes, role uint8)" + params: [$(inputs.report), 1] + 
+# yaml-language-server: $schema=../workflow_schema.json diff --git a/pkg/workflows/testdata/fixtures/workflows/marshalling/workflow_2.yaml b/pkg/workflows/testdata/fixtures/workflows/marshalling/workflow_2.yaml new file mode 100644 index 000000000..be40a91da --- /dev/null +++ b/pkg/workflows/testdata/fixtures/workflows/marshalling/workflow_2.yaml @@ -0,0 +1,28 @@ + triggers: + - id: on_mercury_report@1 + ref: report_data + config: {} + + # no actions + + consensus: + - id: + name: trigger_test + version: "2" + labels: + chain: ethereum + aaShouldBeFirst: "true" + network: mainnet + config: {} + inputs: + observations: + - triggers.report_data.outputs + + targets: + - id: write_polygon_mainnet@1 + config: {} + inputs: + report: + - consensus.evm_median.outputs.report + +# yaml-language-server: $schema=../workflow_schema.json diff --git a/pkg/workflows/testdata/fixtures/workflows/marshalling/workflow_2_spec.json b/pkg/workflows/testdata/fixtures/workflows/marshalling/workflow_2_spec.json new file mode 100644 index 000000000..000fa4692 --- /dev/null +++ b/pkg/workflows/testdata/fixtures/workflows/marshalling/workflow_2_spec.json @@ -0,0 +1,31 @@ +{ + "triggers": [ + { + "id": "on_mercury_report@1", + "ref": "report_data", + "config": {} + } + ], + "consensus": [ + { + "id": "trigger_test:aaShouldBeFirst_true:chain_ethereum:network_mainnet@2", + "inputs": { + "observations": [ + "triggers.report_data.outputs" + ] + }, + "config": {} + } + ], + "targets": [ + { + "id": "write_polygon_mainnet@1", + "inputs": { + "report": [ + "consensus.evm_median.outputs.report" + ] + }, + "config": {} + } + ] +} \ No newline at end of file diff --git a/pkg/workflows/testdata/fixtures/workflows/references/failing_1.yaml b/pkg/workflows/testdata/fixtures/workflows/references/failing_1.yaml new file mode 100644 index 000000000..b3c984e98 --- /dev/null +++ b/pkg/workflows/testdata/fixtures/workflows/references/failing_1.yaml @@ -0,0 +1,15 @@ +triggers: +- id: trigger_test@1 + config: {} + 
+consensus: + - id: offchain_reporting@1 + ref: offchain_reporting=1 + config: {} + +targets: + - id: write_polygon_mainnet@1 + ref: write_polygon_mainnet_1 + config: {} + +# yaml-language-server: $schema=../workflow_schema.json diff --git a/pkg/workflows/testdata/fixtures/workflows/references/passing_1.yaml b/pkg/workflows/testdata/fixtures/workflows/references/passing_1.yaml new file mode 100644 index 000000000..cb2f424e9 --- /dev/null +++ b/pkg/workflows/testdata/fixtures/workflows/references/passing_1.yaml @@ -0,0 +1,15 @@ +triggers: +- id: trigger_test@1 + config: {} + +consensus: + - id: offchain_reporting@1 + ref: offchain_reporting_1 + config: {} + +targets: + - id: write_polygon_mainnet@1 + ref: write_polygon_mainnet_1 + config: {} + +# yaml-language-server: $schema=../workflow_schema.json diff --git a/pkg/workflows/testdata/fixtures/workflows/versioning/failing_1.yaml b/pkg/workflows/testdata/fixtures/workflows/versioning/failing_1.yaml new file mode 100644 index 000000000..2e41eeb98 --- /dev/null +++ b/pkg/workflows/testdata/fixtures/workflows/versioning/failing_1.yaml @@ -0,0 +1,16 @@ +# Should fail since version is more specific than major +triggers: + - id: trigger_test@1.0 + config: {} + +consensus: + - id: offchain_reporting@1 + ref: offchain_reporting_1 + config: {} + +targets: + - id: write_polygon_mainnet@1 + ref: write_polygon_mainnet_1 + config: {} + +# yaml-language-server: $schema=../workflow_schema.json diff --git a/pkg/workflows/testdata/fixtures/workflows/versioning/failing_2.yaml b/pkg/workflows/testdata/fixtures/workflows/versioning/failing_2.yaml new file mode 100644 index 000000000..36cd5b68b --- /dev/null +++ b/pkg/workflows/testdata/fixtures/workflows/versioning/failing_2.yaml @@ -0,0 +1,17 @@ + +# Should fail since version is more specific than major +triggers: + - id: trigger_test@1.0.0 + config: {} + +consensus: + - id: offchain_reporting@1 + ref: offchain_reporting_1 + config: {} + +targets: + - id: write_polygon_mainnet@1 + ref: 
write_polygon_mainnet_1 + config: {} + +# yaml-language-server: $schema=../workflow_schema.json diff --git a/pkg/workflows/testdata/fixtures/workflows/versioning/passing_1.yaml b/pkg/workflows/testdata/fixtures/workflows/versioning/passing_1.yaml new file mode 100644 index 000000000..4579c2899 --- /dev/null +++ b/pkg/workflows/testdata/fixtures/workflows/versioning/passing_1.yaml @@ -0,0 +1,15 @@ + triggers: + - id: trigger_test@1 + config: {} + + consensus: + - id: offchain_reporting@1-beta.1 + ref: offchain_reporting_1 + config: {} + + targets: + - id: write_polygon_mainnet@1-alpha+sha246er3 + ref: write_polygon_mainnet_1 + config: {} + +# yaml-language-server: $schema=../workflow_schema.json diff --git a/pkg/workflows/testdata/fixtures/workflows/workflow_schema.json b/pkg/workflows/testdata/fixtures/workflows/workflow_schema.json new file mode 100644 index 000000000..1c7fbc850 --- /dev/null +++ b/pkg/workflows/testdata/fixtures/workflows/workflow_schema.json @@ -0,0 +1,103 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://github.com/smartcontractkit/chainlink-common/pkg/workflows/workflow-spec-yaml", + "$ref": "#/$defs/workflowSpecYaml", + "$defs": { + "mapping": { + "type": "object" + }, + "stepDefinitionID": { + "oneOf": [ + { + "$id": "string", + "pattern": "^[a-z0-9_\\-:]+@(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?$" + }, + { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://github.com/smartcontractkit/chainlink-common/pkg/workflows/step-definition-table-id", + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string", + "pattern": "(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?$" + }, + "labels": { + "additionalProperties": { + "type": "string" 
+ }, + "type": "object" + } + }, + "additionalProperties": false, + "type": "object", + "required": [ + "name", + "version", + "labels" + ] + } + ], + "title": "id" + }, + "stepDefinitionYaml": { + "properties": { + "id": { + "$ref": "#/$defs/stepDefinitionID" + }, + "ref": { + "type": "string", + "pattern": "^[a-z0-9_-]+$" + }, + "inputs": { + "$ref": "#/$defs/mapping" + }, + "config": { + "$ref": "#/$defs/mapping" + } + }, + "additionalProperties": false, + "type": "object", + "required": [ + "id", + "config" + ] + }, + "workflowSpecYaml": { + "properties": { + "triggers": { + "items": { + "$ref": "#/$defs/stepDefinitionYaml" + }, + "type": "array" + }, + "actions": { + "items": { + "$ref": "#/$defs/stepDefinitionYaml" + }, + "type": "array" + }, + "consensus": { + "items": { + "$ref": "#/$defs/stepDefinitionYaml" + }, + "type": "array" + }, + "targets": { + "items": { + "$ref": "#/$defs/stepDefinitionYaml" + }, + "type": "array" + } + }, + "additionalProperties": false, + "type": "object", + "required": [ + "triggers", + "consensus", + "targets" + ] + } + } +} \ No newline at end of file diff --git a/script/lint.sh b/script/lint.sh index 9ecb99138..3acaf62a8 100755 --- a/script/lint.sh +++ b/script/lint.sh @@ -2,7 +2,6 @@ # run_linter.sh # This script runs golangci-lint either locally or in a Docker container based on version matching. - # Parameters GOLANGCI_LINT_VERSION="$1" COMMON_OPTS="$2" @@ -16,7 +15,7 @@ mkdir -p "$DIRECTORY" DOCKER_CMD="docker run --rm -v $(pwd):/app -w /app golangci/golangci-lint:v$GOLANGCI_LINT_VERSION golangci-lint run $COMMON_OPTS $EXTRA_OPTS" if command -v golangci-lint >/dev/null 2>&1; then - LOCAL_VERSION=$(golangci-lint version 2>&1 | grep -oP 'version \K[\d.]+') + LOCAL_VERSION=$(golangci-lint version 2>&1 | grep -oE "version .[{0-9}.]+" | sed "s|version .||") if [ "$LOCAL_VERSION" = "$GOLANGCI_LINT_VERSION" ]; then echo "Local golangci-lint version ($LOCAL_VERSION) matches desired version ($GOLANGCI_LINT_VERSION). 
Using local version." @@ -32,3 +31,5 @@ else echo "Local golangci-lint not found. Using Docker version." $DOCKER_CMD > "$OUTPUT_FILE" fi + +echo "Linting complete. Results saved to $OUTPUT_FILE"