From 47488264908bee4eb5abebd73092f7a0ffb35fd2 Mon Sep 17 00:00:00 2001 From: Joel Takvorian Date: Wed, 19 Mar 2025 09:04:09 +0100 Subject: [PATCH 1/7] PoC dsl (no parsing at this point) --- pkg/dsl/dsl.go | 56 +++++++++++++++++++++++++++++ pkg/dsl/dsl_test.go | 85 +++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 141 insertions(+) create mode 100644 pkg/dsl/dsl.go create mode 100644 pkg/dsl/dsl_test.go diff --git a/pkg/dsl/dsl.go b/pkg/dsl/dsl.go new file mode 100644 index 000000000..2073c1c02 --- /dev/null +++ b/pkg/dsl/dsl.go @@ -0,0 +1,56 @@ +package dsl + +import ( + "github.com/netobserv/flowlogs-pipeline/pkg/api" + "github.com/netobserv/flowlogs-pipeline/pkg/config" + "github.com/netobserv/flowlogs-pipeline/pkg/utils/filters" +) + +type Node struct { + op string + leaf *api.KeepEntryRule + children []*Node + predicate filters.Predicate +} + +func Parse(s string) Node { + return Node{} +} + +func (n *Node) Preprocess() error { + if n.leaf != nil { + p, err := filters.FromKeepEntry(n.leaf) + if err != nil { + return err + } + n.predicate = p + return nil + } + for _, child := range n.children { + err := child.Preprocess() + if err != nil { + return err + } + } + return nil +} + +func (n *Node) Apply(flow config.GenericMap) bool { + if n.leaf != nil { + return n.predicate(flow) + } + if n.op == "and" { + for _, child := range n.children { + if !child.Apply(flow) { + return false + } + } + return true + } + for _, child := range n.children { + if child.Apply(flow) { + return true + } + } + return false +} diff --git a/pkg/dsl/dsl_test.go b/pkg/dsl/dsl_test.go new file mode 100644 index 000000000..40ec2e1bb --- /dev/null +++ b/pkg/dsl/dsl_test.go @@ -0,0 +1,85 @@ +package dsl + +import ( + "testing" + + "github.com/netobserv/flowlogs-pipeline/pkg/api" + "github.com/netobserv/flowlogs-pipeline/pkg/config" + "github.com/stretchr/testify/assert" +) + +func Test(t *testing.T) { + n := Node{ + op: "and", + children: []*Node{ + { + op: "or", + children: []*Node{ + { + leaf: &api.KeepEntryRule{ + Type: api.KeepEntryIfEqual, + KeepEntry: &api.TransformFilterGenericRule{ + Input: "srcnamespace", + Value: "netobserv", + }, + }, + }, + { + op: "and", + children: []*Node{ + { + leaf: &api.KeepEntryRule{ + Type: api.KeepEntryIfEqual, + KeepEntry: &api.TransformFilterGenericRule{ + Input: "srcnamespace", + Value: "ingress", + }, + }, + }, + { + leaf: &api.KeepEntryRule{ + Type: api.KeepEntryIfEqual, + KeepEntry: &api.TransformFilterGenericRule{ + Input: "dstnamespace", + Value: "netobserv", + }, + }, + }, + }, + }, + }, + }, + { + leaf: &api.KeepEntryRule{ + Type: api.KeepEntryIfEqual, + KeepEntry: &api.TransformFilterGenericRule{ + Input: "srckind", + Value: "pod", + }, + }, + }, + }, + } + n.Preprocess() + + result := n.Apply(config.GenericMap{ + "srcnamespace": "plop", + "dstnamespace": "netobserv", + "srckind": "pod", + }) + assert.False(t, result) + + result = n.Apply(config.GenericMap{ + "srcnamespace": "ingress", + "dstnamespace": "netobserv", + "srckind": "pod", + }) + assert.True(t, result) + + result = n.Apply(config.GenericMap{ + "srcnamespace": "ingress", + "dstnamespace": "netobserv", + "srckind": "service", + }) + assert.False(t, result) +} From 86930352435b956bdf0948b938a3225843d2b339 Mon Sep 17 00:00:00 2001 From: Joel Takvorian Date: Tue, 22 Apr 2025 15:58:56 +0200 Subject: [PATCH 2/7] Implement DSL based on goyacc --- .gitignore | 1 + go.mod | 2 +- pkg/dsl/dsl.go | 56 ----- pkg/dsl/dsl_test.go | 85 ------- pkg/dsl/eval.go | 33 +++ pkg/dsl/eval_test.go | 109 +++++++++ 
pkg/dsl/expr.y | 34 +++ pkg/dsl/expr.y.go | 520 +++++++++++++++++++++++++++++++++++++++++++ pkg/dsl/lexer.go | 185 +++++++++++++++ 9 files changed, 883 insertions(+), 142 deletions(-) delete mode 100644 pkg/dsl/dsl.go delete mode 100644 pkg/dsl/dsl_test.go create mode 100644 pkg/dsl/eval.go create mode 100644 pkg/dsl/eval_test.go create mode 100644 pkg/dsl/expr.y create mode 100644 pkg/dsl/expr.y.go create mode 100644 pkg/dsl/lexer.go diff --git a/.gitignore b/.gitignore index 16ce1e8da..6cadf42c6 100644 --- a/.gitignore +++ b/.gitignore @@ -2,3 +2,4 @@ /confgenerator /bin/ cover.out +y.output diff --git a/go.mod b/go.mod index bd1fbfc24..e889cfada 100644 --- a/go.mod +++ b/go.mod @@ -24,6 +24,7 @@ require ( github.com/prometheus/client_golang v1.22.0 github.com/prometheus/client_model v0.6.2 github.com/prometheus/common v0.63.0 + github.com/prometheus/prometheus v1.8.2-0.20201028100903-3245b3267b24 github.com/segmentio/kafka-go v0.4.47 github.com/sirupsen/logrus v1.9.3 github.com/spf13/cobra v1.9.1 @@ -114,7 +115,6 @@ require ( github.com/pion/transport/v2 v2.2.10 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/prometheus/procfs v0.16.0 // indirect - github.com/prometheus/prometheus v1.8.2-0.20201028100903-3245b3267b24 // indirect github.com/rs/xid v1.6.0 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect github.com/safchain/ethtool v0.5.10 // indirect diff --git a/pkg/dsl/dsl.go b/pkg/dsl/dsl.go deleted file mode 100644 index 2073c1c02..000000000 --- a/pkg/dsl/dsl.go +++ /dev/null @@ -1,56 +0,0 @@ -package dsl - -import ( - "github.com/netobserv/flowlogs-pipeline/pkg/api" - "github.com/netobserv/flowlogs-pipeline/pkg/config" - "github.com/netobserv/flowlogs-pipeline/pkg/utils/filters" -) - -type Node struct { - op string - leaf *api.KeepEntryRule - children []*Node - predicate filters.Predicate -} - -func Parse(s string) Node { - return Node{} -} - -func (n *Node) Preprocess() error { - if n.leaf != nil { - p, err := filters.FromKeepEntry(n.leaf) - if err != nil { - return err - } - n.predicate = p - return nil - } - for _, child := range n.children { - err := child.Preprocess() - if err != nil { - return err - } - } - return nil -} - -func (n *Node) Apply(flow config.GenericMap) bool { - if n.leaf != nil { - return n.predicate(flow) - } - if n.op == "and" { - for _, child := range n.children { - if !child.Apply(flow) { - return false - } - } - return true - } - for _, child := range n.children { - if child.Apply(flow) { - return true - } - } - return false -} diff --git a/pkg/dsl/dsl_test.go b/pkg/dsl/dsl_test.go deleted file mode 100644 index 40ec2e1bb..000000000 --- a/pkg/dsl/dsl_test.go +++ /dev/null @@ -1,85 +0,0 @@ -package dsl - -import ( - "testing" - - "github.com/netobserv/flowlogs-pipeline/pkg/api" - "github.com/netobserv/flowlogs-pipeline/pkg/config" - "github.com/stretchr/testify/assert" -) - -func Test(t *testing.T) { - n := Node{ - op: "and", - children: []*Node{ - { - op: "or", - children: []*Node{ - { - leaf: &api.KeepEntryRule{ - Type: api.KeepEntryIfEqual, - KeepEntry: &api.TransformFilterGenericRule{ - Input: "srcnamespace", - Value: "netobserv", - }, - }, - }, - { - op: "and", - children: []*Node{ - { - leaf: &api.KeepEntryRule{ - Type: api.KeepEntryIfEqual, - KeepEntry: &api.TransformFilterGenericRule{ - Input: "srcnamespace", - Value: "ingress", - }, - }, - }, - { - leaf: &api.KeepEntryRule{ - Type: api.KeepEntryIfEqual, - KeepEntry: &api.TransformFilterGenericRule{ - Input: "dstnamespace", - 
Value: "netobserv", - }, - }, - }, - }, - }, - }, - }, - { - leaf: &api.KeepEntryRule{ - Type: api.KeepEntryIfEqual, - KeepEntry: &api.TransformFilterGenericRule{ - Input: "srckind", - Value: "pod", - }, - }, - }, - }, - } - n.Preprocess() - - result := n.Apply(config.GenericMap{ - "srcnamespace": "plop", - "dstnamespace": "netobserv", - "srckind": "pod", - }) - assert.False(t, result) - - result = n.Apply(config.GenericMap{ - "srcnamespace": "ingress", - "dstnamespace": "netobserv", - "srckind": "pod", - }) - assert.True(t, result) - - result = n.Apply(config.GenericMap{ - "srcnamespace": "ingress", - "dstnamespace": "netobserv", - "srckind": "service", - }) - assert.False(t, result) -} diff --git a/pkg/dsl/eval.go b/pkg/dsl/eval.go new file mode 100644 index 000000000..f37604b29 --- /dev/null +++ b/pkg/dsl/eval.go @@ -0,0 +1,33 @@ +package dsl + +import ( + "github.com/netobserv/flowlogs-pipeline/pkg/config" + "github.com/netobserv/flowlogs-pipeline/pkg/utils/filters" +) + +type tree struct { + logicalOp string + children []*tree + predicate filters.Predicate +} + +func (t *tree) apply(flow config.GenericMap) bool { + if t.predicate != nil { + return t.predicate(flow) + } + if t.logicalOp == operatorAnd { + for _, child := range t.children { + if !child.apply(flow) { + return false + } + } + return true + } + // t.logicalOp == operatorOr + for _, child := range t.children { + if child.apply(flow) { + return true + } + } + return false +} diff --git a/pkg/dsl/eval_test.go b/pkg/dsl/eval_test.go new file mode 100644 index 000000000..67f45aa73 --- /dev/null +++ b/pkg/dsl/eval_test.go @@ -0,0 +1,109 @@ +package dsl + +import ( + "testing" + + "github.com/netobserv/flowlogs-pipeline/pkg/config" + "github.com/stretchr/testify/assert" +) + +func TestAndOrEqual(t *testing.T) { + yyErrorVerbose = true + predicate, err := Parse(`(srcnamespace="netobserv" OR (srcnamespace="ingress" AND dstnamespace="netobserv")) AND srckind!="service"`) + assert.NoError(t, err) + assert.NotNil(t, predicate) + + result := predicate(config.GenericMap{ + "srcnamespace": "plop", + "dstnamespace": "netobserv", + "srckind": "pod", + }) + assert.False(t, result) + + result = predicate(config.GenericMap{ + "srcnamespace": "ingress", + "dstnamespace": "netobserv", + "srckind": "pod", + }) + assert.True(t, result) + + result = predicate(config.GenericMap{ + "srcnamespace": "ingress", + "dstnamespace": "netobserv", + "srckind": "service", + }) + assert.False(t, result) +} + +func TestRegexp(t *testing.T) { + yyErrorVerbose = true + predicate, err := Parse(`srcnamespace=~"openshift.*" and dstnamespace!~"openshift.*"`) + assert.NoError(t, err) + assert.NotNil(t, predicate) + + result := predicate(config.GenericMap{ + "srcnamespace": "openshift-ingress", + "dstnamespace": "my-app", + "srckind": "pod", + }) + assert.True(t, result, "Should accept flows from OpenShift to App") + + result = predicate(config.GenericMap{ + "srcnamespace": "my-app", + "dstnamespace": "openshift-ingress", + "srckind": "pod", + }) + assert.False(t, result, "Should reject flows from App to OpenShift") + + result = predicate(config.GenericMap{ + "srcnamespace": "my-app", + "dstnamespace": "my-app", + "srckind": "pod", + }) + assert.False(t, result, "Should reject flows from App to App") + + result = predicate(config.GenericMap{ + "srcnamespace": "openshift-operators", + "dstnamespace": "openshift-ingress", + "srckind": "pod", + }) + assert.False(t, result, "Should reject flows from OpenShift to OpenShift") +} + +func TestWith(t *testing.T) { + 
yyErrorVerbose = true + predicate, err := Parse(`srcnamespace="foo" and with(rtt)`) + assert.NoError(t, err) + assert.NotNil(t, predicate) + + result := predicate(config.GenericMap{ + "srcnamespace": "foo", + "rtt": 4.5, + }) + assert.True(t, result, "Should accept flows from foo with rtt") + + result = predicate(config.GenericMap{ + "srcnamespace": "foo", + }) + assert.False(t, result, "Should reject flows from foo without rtt") +} + +func TestWithout(t *testing.T) { + yyErrorVerbose = true + predicate, err := Parse(`srcnamespace="foo" or without(srcnamespace)`) + assert.NoError(t, err) + assert.NotNil(t, predicate) + + result := predicate(config.GenericMap{ + "srcnamespace": "foo", + }) + assert.True(t, result, "Should accept flows from foo") + + result = predicate(config.GenericMap{}) + assert.True(t, result, "Should accept flows without srcnamespace") + + result = predicate(config.GenericMap{ + "srcnamespace": "bar", + }) + assert.False(t, result, "Should reject flows from bar") +} diff --git a/pkg/dsl/expr.y b/pkg/dsl/expr.y new file mode 100644 index 000000000..7d7a1e15e --- /dev/null +++ b/pkg/dsl/expr.y @@ -0,0 +1,34 @@ +%{ +package dsl +%} + +%union{ + expr Expression + value string +} + +%type root +%type expr + +%token VAR STRING NUMBER AND OR EQ NEQ REG NREG OPEN_PARENTHESIS CLOSE_PARENTHESIS WITH WITHOUT +%left AND +%left OR +%% + +root: + expr { + $$ = $1 + yylex.(*Lexer).result = $$ + } + +expr: + OPEN_PARENTHESIS expr CLOSE_PARENTHESIS { $$ = ParenthesisExpr{inner: $2} } + | expr AND expr { $$ = LogicalExpr{left: $1, operator: operatorAnd, right: $3} } + | expr OR expr { $$ = LogicalExpr{left: $1, operator: operatorOr, right: $3} } + | WITH OPEN_PARENTHESIS VAR CLOSE_PARENTHESIS { $$ = WithExpr{key: $3} } + | WITHOUT OPEN_PARENTHESIS VAR CLOSE_PARENTHESIS { $$ = WithoutExpr{key: $3} } + | VAR EQ STRING { $$ = EqExpr{key: $1, value: $3} } + | VAR NEQ STRING { $$ = NEqExpr{key: $1, value: $3} } + | VAR REG STRING { $$ = RegExpr{key: $1, value: $3} } + | VAR NREG STRING { $$ = NRegExpr{key: $1, value: $3} } +%% diff --git a/pkg/dsl/expr.y.go b/pkg/dsl/expr.y.go new file mode 100644 index 000000000..bd79c76f2 --- /dev/null +++ b/pkg/dsl/expr.y.go @@ -0,0 +1,520 @@ +// Code generated by goyacc -o pkg/dsl/expr.y.go pkg/dsl/expr.y. DO NOT EDIT. 
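// Illustrative sketch (not part of the generated parser): the grammar above builds an
// Expression tree through the per-production actions. For a query such as
//     (srcnamespace="a" or dstnamespace="b") and with(rtt)
// the parser produces roughly the following value, using the expression types and
// operator constants defined further down in lexer.go (the fields are unexported, so
// this literal is only meaningful inside the dsl package):
//
//     LogicalExpr{
//         left: ParenthesisExpr{inner: LogicalExpr{
//             left:     EqExpr{key: "srcnamespace", value: "a"},
//             operator: operatorOr,
//             right:    EqExpr{key: "dstnamespace", value: "b"},
//         }},
//         operator: operatorAnd,
//         right:    WithExpr{key: "rtt"},
//     }
//
// Parse then calls toTree() on this result to obtain the predicate evaluated in eval.go.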
+ +//line pkg/dsl/expr.y:2 +package dsl + +import __yyfmt__ "fmt" + +//line pkg/dsl/expr.y:2 + +//line pkg/dsl/expr.y:5 +type yySymType struct { + yys int + expr Expression + value string +} + +const VAR = 57346 +const STRING = 57347 +const NUMBER = 57348 +const AND = 57349 +const OR = 57350 +const EQ = 57351 +const NEQ = 57352 +const REG = 57353 +const NREG = 57354 +const OPEN_PARENTHESIS = 57355 +const CLOSE_PARENTHESIS = 57356 +const WITH = 57357 +const WITHOUT = 57358 + +var yyToknames = [...]string{ + "$end", + "error", + "$unk", + "VAR", + "STRING", + "NUMBER", + "AND", + "OR", + "EQ", + "NEQ", + "REG", + "NREG", + "OPEN_PARENTHESIS", + "CLOSE_PARENTHESIS", + "WITH", + "WITHOUT", +} + +var yyStatenames = [...]string{} + +const yyEofCode = 1 +const yyErrCode = 2 +const yyInitialStackSize = 16 + +//line pkg/dsl/expr.y:34 + +//line yacctab:1 +var yyExca = [...]int{ + -1, 1, + 1, -1, + -2, 0, +} + +const yyPrivate = 57344 + +const yyLast = 29 + +var yyAct = [...]int{ + 6, 7, 8, 26, 12, 13, 14, 15, 18, 3, + 11, 4, 5, 25, 10, 2, 7, 8, 8, 9, + 24, 23, 22, 16, 17, 21, 20, 19, 1, +} + +var yyPact = [...]int{ + -4, -1000, 9, -4, 1, -3, -5, -4, -4, -6, + 23, 22, 20, 17, 16, 15, 10, -1000, -1000, -1, + -11, -1000, -1000, -1000, -1000, -1000, -1000, +} + +var yyPgo = [...]int{ + 0, 28, 15, +} + +var yyR1 = [...]int{ + 0, 1, 2, 2, 2, 2, 2, 2, 2, 2, + 2, +} + +var yyR2 = [...]int{ + 0, 1, 3, 3, 3, 4, 4, 3, 3, 3, + 3, +} + +var yyChk = [...]int{ + -1000, -1, -2, 13, 15, 16, 4, 7, 8, -2, + 13, 13, 9, 10, 11, 12, -2, -2, 14, 4, + 4, 5, 5, 5, 5, 14, 14, +} + +var yyDef = [...]int{ + 0, -2, 1, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 3, 4, 2, 0, + 0, 7, 8, 9, 10, 5, 6, +} + +var yyTok1 = [...]int{ + 1, +} + +var yyTok2 = [...]int{ + 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, + 12, 13, 14, 15, 16, +} + +var yyTok3 = [...]int{ + 0, +} + +var yyErrorMessages = [...]struct { + state int + token int + msg string +}{} + +//line yaccpar:1 + +/* parser for yacc output */ + +var ( + yyDebug = 0 + yyErrorVerbose = false +) + +type yyLexer interface { + Lex(lval *yySymType) int + Error(s string) +} + +type yyParser interface { + Parse(yyLexer) int + Lookahead() int +} + +type yyParserImpl struct { + lval yySymType + stack [yyInitialStackSize]yySymType + char int +} + +func (p *yyParserImpl) Lookahead() int { + return p.char +} + +func yyNewParser() yyParser { + return &yyParserImpl{} +} + +const yyFlag = -1000 + +func yyTokname(c int) string { + if c >= 1 && c-1 < len(yyToknames) { + if yyToknames[c-1] != "" { + return yyToknames[c-1] + } + } + return __yyfmt__.Sprintf("tok-%v", c) +} + +func yyStatname(s int) string { + if s >= 0 && s < len(yyStatenames) { + if yyStatenames[s] != "" { + return yyStatenames[s] + } + } + return __yyfmt__.Sprintf("state-%v", s) +} + +func yyErrorMessage(state, lookAhead int) string { + const TOKSTART = 4 + + if !yyErrorVerbose { + return "syntax error" + } + + for _, e := range yyErrorMessages { + if e.state == state && e.token == lookAhead { + return "syntax error: " + e.msg + } + } + + res := "syntax error: unexpected " + yyTokname(lookAhead) + + // To match Bison, suggest at most four expected tokens. + expected := make([]int, 0, 4) + + // Look for shiftable tokens. 
+ base := yyPact[state] + for tok := TOKSTART; tok-1 < len(yyToknames); tok++ { + if n := base + tok; n >= 0 && n < yyLast && yyChk[yyAct[n]] == tok { + if len(expected) == cap(expected) { + return res + } + expected = append(expected, tok) + } + } + + if yyDef[state] == -2 { + i := 0 + for yyExca[i] != -1 || yyExca[i+1] != state { + i += 2 + } + + // Look for tokens that we accept or reduce. + for i += 2; yyExca[i] >= 0; i += 2 { + tok := yyExca[i] + if tok < TOKSTART || yyExca[i+1] == 0 { + continue + } + if len(expected) == cap(expected) { + return res + } + expected = append(expected, tok) + } + + // If the default action is to accept or reduce, give up. + if yyExca[i+1] != 0 { + return res + } + } + + for i, tok := range expected { + if i == 0 { + res += ", expecting " + } else { + res += " or " + } + res += yyTokname(tok) + } + return res +} + +func yylex1(lex yyLexer, lval *yySymType) (char, token int) { + token = 0 + char = lex.Lex(lval) + if char <= 0 { + token = yyTok1[0] + goto out + } + if char < len(yyTok1) { + token = yyTok1[char] + goto out + } + if char >= yyPrivate { + if char < yyPrivate+len(yyTok2) { + token = yyTok2[char-yyPrivate] + goto out + } + } + for i := 0; i < len(yyTok3); i += 2 { + token = yyTok3[i+0] + if token == char { + token = yyTok3[i+1] + goto out + } + } + +out: + if token == 0 { + token = yyTok2[1] /* unknown char */ + } + if yyDebug >= 3 { + __yyfmt__.Printf("lex %s(%d)\n", yyTokname(token), uint(char)) + } + return char, token +} + +func yyParse(yylex yyLexer) int { + return yyNewParser().Parse(yylex) +} + +func (yyrcvr *yyParserImpl) Parse(yylex yyLexer) int { + var yyn int + var yyVAL yySymType + var yyDollar []yySymType + _ = yyDollar // silence set and not used + yyS := yyrcvr.stack[:] + + Nerrs := 0 /* number of errors */ + Errflag := 0 /* error recovery flag */ + yystate := 0 + yyrcvr.char = -1 + yytoken := -1 // yyrcvr.char translated into internal numbering + defer func() { + // Make sure we report no lookahead when not parsing. + yystate = -1 + yyrcvr.char = -1 + yytoken = -1 + }() + yyp := -1 + goto yystack + +ret0: + return 0 + +ret1: + return 1 + +yystack: + /* put a state and value onto the stack */ + if yyDebug >= 4 { + __yyfmt__.Printf("char %v in %v\n", yyTokname(yytoken), yyStatname(yystate)) + } + + yyp++ + if yyp >= len(yyS) { + nyys := make([]yySymType, len(yyS)*2) + copy(nyys, yyS) + yyS = nyys + } + yyS[yyp] = yyVAL + yyS[yyp].yys = yystate + +yynewstate: + yyn = yyPact[yystate] + if yyn <= yyFlag { + goto yydefault /* simple state */ + } + if yyrcvr.char < 0 { + yyrcvr.char, yytoken = yylex1(yylex, &yyrcvr.lval) + } + yyn += yytoken + if yyn < 0 || yyn >= yyLast { + goto yydefault + } + yyn = yyAct[yyn] + if yyChk[yyn] == yytoken { /* valid shift */ + yyrcvr.char = -1 + yytoken = -1 + yyVAL = yyrcvr.lval + yystate = yyn + if Errflag > 0 { + Errflag-- + } + goto yystack + } + +yydefault: + /* default state action */ + yyn = yyDef[yystate] + if yyn == -2 { + if yyrcvr.char < 0 { + yyrcvr.char, yytoken = yylex1(yylex, &yyrcvr.lval) + } + + /* look through exception table */ + xi := 0 + for { + if yyExca[xi+0] == -1 && yyExca[xi+1] == yystate { + break + } + xi += 2 + } + for xi += 2; ; xi += 2 { + yyn = yyExca[xi+0] + if yyn < 0 || yyn == yytoken { + break + } + } + yyn = yyExca[xi+1] + if yyn < 0 { + goto ret0 + } + } + if yyn == 0 { + /* error ... 
attempt to resume parsing */ + switch Errflag { + case 0: /* brand new error */ + yylex.Error(yyErrorMessage(yystate, yytoken)) + Nerrs++ + if yyDebug >= 1 { + __yyfmt__.Printf("%s", yyStatname(yystate)) + __yyfmt__.Printf(" saw %s\n", yyTokname(yytoken)) + } + fallthrough + + case 1, 2: /* incompletely recovered error ... try again */ + Errflag = 3 + + /* find a state where "error" is a legal shift action */ + for yyp >= 0 { + yyn = yyPact[yyS[yyp].yys] + yyErrCode + if yyn >= 0 && yyn < yyLast { + yystate = yyAct[yyn] /* simulate a shift of "error" */ + if yyChk[yystate] == yyErrCode { + goto yystack + } + } + + /* the current p has no shift on "error", pop stack */ + if yyDebug >= 2 { + __yyfmt__.Printf("error recovery pops state %d\n", yyS[yyp].yys) + } + yyp-- + } + /* there is no state on the stack with an error shift ... abort */ + goto ret1 + + case 3: /* no shift yet; clobber input char */ + if yyDebug >= 2 { + __yyfmt__.Printf("error recovery discards %s\n", yyTokname(yytoken)) + } + if yytoken == yyEofCode { + goto ret1 + } + yyrcvr.char = -1 + yytoken = -1 + goto yynewstate /* try again in the same state */ + } + } + + /* reduction by production yyn */ + if yyDebug >= 2 { + __yyfmt__.Printf("reduce %v in:\n\t%v\n", yyn, yyStatname(yystate)) + } + + yynt := yyn + yypt := yyp + _ = yypt // guard against "declared and not used" + + yyp -= yyR2[yyn] + // yyp is now the index of $0. Perform the default action. Iff the + // reduced production is ε, $1 is possibly out of range. + if yyp+1 >= len(yyS) { + nyys := make([]yySymType, len(yyS)*2) + copy(nyys, yyS) + yyS = nyys + } + yyVAL = yyS[yyp+1] + + /* consult goto table to find next state */ + yyn = yyR1[yyn] + yyg := yyPgo[yyn] + yyj := yyg + yyS[yyp].yys + 1 + + if yyj >= yyLast { + yystate = yyAct[yyg] + } else { + yystate = yyAct[yyj] + if yyChk[yystate] != -yyn { + yystate = yyAct[yyg] + } + } + // dummy call; replaced with literal code + switch yynt { + + case 1: + yyDollar = yyS[yypt-1 : yypt+1] +//line pkg/dsl/expr.y:19 + { + yyVAL.expr = yyDollar[1].expr + yylex.(*Lexer).result = yyVAL.expr + } + case 2: + yyDollar = yyS[yypt-3 : yypt+1] +//line pkg/dsl/expr.y:25 + { + yyVAL.expr = ParenthesisExpr{inner: yyDollar[2].expr} + } + case 3: + yyDollar = yyS[yypt-3 : yypt+1] +//line pkg/dsl/expr.y:26 + { + yyVAL.expr = LogicalExpr{left: yyDollar[1].expr, operator: operatorAnd, right: yyDollar[3].expr} + } + case 4: + yyDollar = yyS[yypt-3 : yypt+1] +//line pkg/dsl/expr.y:27 + { + yyVAL.expr = LogicalExpr{left: yyDollar[1].expr, operator: operatorOr, right: yyDollar[3].expr} + } + case 5: + yyDollar = yyS[yypt-4 : yypt+1] +//line pkg/dsl/expr.y:28 + { + yyVAL.expr = WithExpr{key: yyDollar[3].value} + } + case 6: + yyDollar = yyS[yypt-4 : yypt+1] +//line pkg/dsl/expr.y:29 + { + yyVAL.expr = WithoutExpr{key: yyDollar[3].value} + } + case 7: + yyDollar = yyS[yypt-3 : yypt+1] +//line pkg/dsl/expr.y:30 + { + yyVAL.expr = EqExpr{key: yyDollar[1].value, value: yyDollar[3].value} + } + case 8: + yyDollar = yyS[yypt-3 : yypt+1] +//line pkg/dsl/expr.y:31 + { + yyVAL.expr = NEqExpr{key: yyDollar[1].value, value: yyDollar[3].value} + } + case 9: + yyDollar = yyS[yypt-3 : yypt+1] +//line pkg/dsl/expr.y:32 + { + yyVAL.expr = RegExpr{key: yyDollar[1].value, value: yyDollar[3].value} + } + case 10: + yyDollar = yyS[yypt-3 : yypt+1] +//line pkg/dsl/expr.y:33 + { + yyVAL.expr = NRegExpr{key: yyDollar[1].value, value: yyDollar[3].value} + } + } + goto yystack /* stack new state and value */ +} diff --git a/pkg/dsl/lexer.go b/pkg/dsl/lexer.go new 
file mode 100644 index 000000000..5f3773f5a --- /dev/null +++ b/pkg/dsl/lexer.go @@ -0,0 +1,185 @@ +package dsl + +import ( + "errors" + "fmt" + "regexp" + "strings" + "text/scanner" + + "github.com/netobserv/flowlogs-pipeline/pkg/utils/filters" + "github.com/prometheus/prometheus/util/strutil" +) + +const ( + operatorOr = "or" + operatorAnd = "and" +) + +var tokens = map[string]int{ + "=": EQ, + "!=": NEQ, + "=~": REG, + "!~": NREG, + operatorOr: OR, + operatorAnd: AND, + "with": WITH, + "without": WITHOUT, + "(": OPEN_PARENTHESIS, + ")": CLOSE_PARENTHESIS, +} + +type Expression interface { + toTree() (*tree, error) +} + +type ParenthesisExpr struct { + inner Expression +} + +func (pe ParenthesisExpr) toTree() (*tree, error) { + return pe.inner.toTree() +} + +type EqExpr struct { + key string + value string +} + +func (ee EqExpr) toTree() (*tree, error) { + return &tree{predicate: filters.Equal(ee.key, ee.value, false)}, nil +} + +type NEqExpr struct { + key string + value string +} + +func (ne NEqExpr) toTree() (*tree, error) { + return &tree{predicate: filters.NotEqual(ne.key, ne.value, false)}, nil +} + +type RegExpr struct { + key string + value string +} + +func (re RegExpr) toTree() (*tree, error) { + r, err := regexp.Compile(re.value) + if err != nil { + return nil, fmt.Errorf("invalid regex filter: cannot compile regex [%w]", err) + } + return &tree{predicate: filters.Regex(re.key, r)}, nil +} + +type NRegExpr struct { + key string + value string +} + +func (re NRegExpr) toTree() (*tree, error) { + r, err := regexp.Compile(re.value) + if err != nil { + return nil, fmt.Errorf("invalid regex filter: cannot compile regex [%w]", err) + } + return &tree{predicate: filters.NotRegex(re.key, r)}, nil +} + +type WithExpr struct { + key string +} + +func (we WithExpr) toTree() (*tree, error) { + return &tree{predicate: filters.Presence(we.key)}, nil +} + +type WithoutExpr struct { + key string +} + +func (we WithoutExpr) toTree() (*tree, error) { + return &tree{predicate: filters.Absence(we.key)}, nil +} + +type LogicalExpr struct { + left Expression + operator string + right Expression +} + +func (le LogicalExpr) toTree() (*tree, error) { + left, err := le.left.toTree() + if err != nil { + return nil, err + } + right, err := le.right.toTree() + if err != nil { + return nil, err + } + return &tree{ + logicalOp: le.operator, + children: []*tree{left, right}, + }, nil +} + +type Lexer struct { + scanner.Scanner + errs []error + result Expression +} + +func (l *Lexer) Lex(lval *yySymType) int { + token := l.Scan() + if token == scanner.EOF { + return 0 + } + tokenText := l.TokenText() + lval.value = tokenText + switch token { + + case scanner.Int, scanner.Float: + return NUMBER + + case scanner.String, scanner.RawString: + var err error + + lval.value, err = strutil.Unquote(tokenText) + if err != nil { + l.Error(err.Error()) + + return 0 + } + + return STRING + } + + tokenNext := tokenText + string(l.Peek()) + if tok, ok := tokens[tokenNext]; ok { + l.Next() + return tok + } + + if tok, ok := tokens[strings.ToLower(tokenText)]; ok { + return tok + } + + return VAR +} + +func (l *Lexer) Error(msg string) { + l.errs = append(l.errs, fmt.Errorf("%s: %d:%d", msg, l.Line, l.Column)) +} + +func Parse(s string) (filters.Predicate, error) { + l := new(Lexer) + l.Init(strings.NewReader(s)) + yyParse(l) + if len(l.errs) > 0 { + return nil, errors.Join(l.errs...) 
+ } + t, err := l.result.toTree() + if err != nil { + return nil, err + } + return t.apply, nil +} From 6d9e1f51a9e7fb6479853f389d918779084264ba Mon Sep 17 00:00:00 2001 From: Joel Takvorian Date: Tue, 22 Apr 2025 17:50:30 +0200 Subject: [PATCH 3/7] Replace keep_entry API with query API --- docs/api.md | 15 +----- pkg/api/transform_filter.go | 23 +------- pkg/pipeline/transform/transform_filter.go | 43 +++++++-------- .../transform/transform_filter_test.go | 54 +++---------------- pkg/utils/filters/filters.go | 40 -------------- 5 files changed, 29 insertions(+), 146 deletions(-) diff --git a/docs/api.md b/docs/api.md index 37ff16bd7..0e5aca975 100644 --- a/docs/api.md +++ b/docs/api.md @@ -162,7 +162,7 @@ Following is the supported API format for filter transformations: remove_entry_if_equal: removes the entry if the field value equals specified value remove_entry_if_not_equal: removes the entry if the field value does not equal specified value remove_entry_all_satisfied: removes the entry if all of the defined rules are satisfied - keep_entry_all_satisfied: keeps the entry if the set of rules are all satisfied + keep_entry_query: keeps the entry if it matches the query add_field: adds (input) field to the entry; overrides previous value if present (key=input, value=value) add_field_if_doesnt_exist: adds a field to the entry if the field does not exist add_field_if: add output field set to assignee if input field satisfies criteria from parameters field @@ -188,18 +188,7 @@ Following is the supported API format for filter transformations: input: entry input field value: specified value of input field: castInt: set true to cast the value field as an int (numeric values are float64 otherwise) - keepEntryAllSatisfied: configuration for keep_entry rule - type: (enum) one of the following: - keep_entry_if_exists: keeps the entry if the field exists - keep_entry_if_doesnt_exist: keeps the entry if the field does not exist - keep_entry_if_equal: keeps the entry if the field value equals specified value - keep_entry_if_not_equal: keeps the entry if the field value does not equal specified value - keep_entry_if_regex_match: keeps the entry if the field value matches the specified regex - keep_entry_if_not_regex_match: keeps the entry if the field value does not match the specified regex - keepEntry: configuration for keep_entry_* rules - input: entry input field - value: specified value of input field: - castInt: set true to cast the value field as an int (numeric values are float64 otherwise) + keepEntryQuery: configuration for keep_entry rule keepEntrySampling: sampling value for keep_entry type: 1 flow on is kept addField: configuration for add_field rule input: entry input field diff --git a/pkg/api/transform_filter.go b/pkg/api/transform_filter.go index e41d7cd90..29f0b0cfc 100644 --- a/pkg/api/transform_filter.go +++ b/pkg/api/transform_filter.go @@ -37,7 +37,7 @@ const ( RemoveEntryIfEqual TransformFilterEnum = "remove_entry_if_equal" // removes the entry if the field value equals specified value RemoveEntryIfNotEqual TransformFilterEnum = "remove_entry_if_not_equal" // removes the entry if the field value does not equal specified value RemoveEntryAllSatisfied TransformFilterEnum = "remove_entry_all_satisfied" // removes the entry if all of the defined rules are satisfied - KeepEntryAllSatisfied TransformFilterEnum = "keep_entry_all_satisfied" // keeps the entry if the set of rules are all satisfied + KeepEntryQuery TransformFilterEnum = "keep_entry_query" // keeps the entry if it matches 
the query AddField TransformFilterEnum = "add_field" // adds (input) field to the entry; overrides previous value if present (key=input, value=value) AddFieldIfDoesntExist TransformFilterEnum = "add_field_if_doesnt_exist" // adds a field to the entry if the field does not exist AddFieldIf TransformFilterEnum = "add_field_if" // add output field set to assignee if input field satisfies criteria from parameters field @@ -56,23 +56,12 @@ const ( RemoveEntryIfNotEqualD TransformFilterRemoveEntryEnum = "remove_entry_if_not_equal" // removes the entry if the field value does not equal specified value ) -type TransformFilterKeepEntryEnum string - -const ( - KeepEntryIfExists TransformFilterKeepEntryEnum = "keep_entry_if_exists" // keeps the entry if the field exists - KeepEntryIfDoesntExist TransformFilterKeepEntryEnum = "keep_entry_if_doesnt_exist" // keeps the entry if the field does not exist - KeepEntryIfEqual TransformFilterKeepEntryEnum = "keep_entry_if_equal" // keeps the entry if the field value equals specified value - KeepEntryIfNotEqual TransformFilterKeepEntryEnum = "keep_entry_if_not_equal" // keeps the entry if the field value does not equal specified value - KeepEntryIfRegexMatch TransformFilterKeepEntryEnum = "keep_entry_if_regex_match" // keeps the entry if the field value matches the specified regex - KeepEntryIfNotRegexMatch TransformFilterKeepEntryEnum = "keep_entry_if_not_regex_match" // keeps the entry if the field value does not match the specified regex -) - type TransformFilterRule struct { Type TransformFilterEnum `yaml:"type,omitempty" json:"type,omitempty" doc:"(enum) one of the following:"` RemoveField *TransformFilterGenericRule `yaml:"removeField,omitempty" json:"removeField,omitempty" doc:"configuration for remove_field rule"` RemoveEntry *TransformFilterGenericRule `yaml:"removeEntry,omitempty" json:"removeEntry,omitempty" doc:"configuration for remove_entry_* rules"` RemoveEntryAllSatisfied []*RemoveEntryRule `yaml:"removeEntryAllSatisfied,omitempty" json:"removeEntryAllSatisfied,omitempty" doc:"configuration for remove_entry_all_satisfied rule"` - KeepEntryAllSatisfied []*KeepEntryRule `yaml:"keepEntryAllSatisfied,omitempty" json:"keepEntryAllSatisfied,omitempty" doc:"configuration for keep_entry rule"` + KeepEntryQuery string `yaml:"keepEntryQuery,omitempty" json:"keepEntryQuery,omitempty" doc:"configuration for keep_entry rule"` KeepEntrySampling uint16 `yaml:"keepEntrySampling,omitempty" json:"keepEntrySampling,omitempty" doc:"sampling value for keep_entry type: 1 flow on is kept"` AddField *TransformFilterGenericRule `yaml:"addField,omitempty" json:"addField,omitempty" doc:"configuration for add_field rule"` AddFieldIfDoesntExist *TransformFilterGenericRule `yaml:"addFieldIfDoesntExist,omitempty" json:"addFieldIfDoesntExist,omitempty" doc:"configuration for add_field_if_doesnt_exist rule"` @@ -93,9 +82,6 @@ func (r *TransformFilterRule) preprocess() { for i := range r.RemoveEntryAllSatisfied { r.RemoveEntryAllSatisfied[i].RemoveEntry.preprocess() } - for i := range r.KeepEntryAllSatisfied { - r.KeepEntryAllSatisfied[i].KeepEntry.preprocess() - } for i := range r.ConditionalSampling { r.ConditionalSampling[i].preprocess() } @@ -127,11 +113,6 @@ type RemoveEntryRule struct { RemoveEntry *TransformFilterGenericRule `yaml:"removeEntry,omitempty" json:"removeEntry,omitempty" doc:"configuration for remove_entry_* rules"` } -type KeepEntryRule struct { - Type TransformFilterKeepEntryEnum `yaml:"type,omitempty" json:"type,omitempty" doc:"(enum) one of the 
following:"` - KeepEntry *TransformFilterGenericRule `yaml:"keepEntry,omitempty" json:"keepEntry,omitempty" doc:"configuration for keep_entry_* rules"` -} - type SamplingCondition struct { Value uint16 `yaml:"value,omitempty" json:"value,omitempty" doc:"sampling value: 1 flow on is kept"` Rules []*RemoveEntryRule `yaml:"rules,omitempty" json:"rules,omitempty" doc:"rules to be satisfied for this sampling configuration"` diff --git a/pkg/pipeline/transform/transform_filter.go b/pkg/pipeline/transform/transform_filter.go index 0d8dd7189..a2ab7285c 100644 --- a/pkg/pipeline/transform/transform_filter.go +++ b/pkg/pipeline/transform/transform_filter.go @@ -27,6 +27,7 @@ import ( "github.com/Knetic/govaluate" "github.com/netobserv/flowlogs-pipeline/pkg/api" "github.com/netobserv/flowlogs-pipeline/pkg/config" + "github.com/netobserv/flowlogs-pipeline/pkg/dsl" "github.com/netobserv/flowlogs-pipeline/pkg/utils" "github.com/netobserv/flowlogs-pipeline/pkg/utils/filters" "github.com/sirupsen/logrus" @@ -39,12 +40,12 @@ var ( type Filter struct { Rules []api.TransformFilterRule - KeepRules []predicatesRule + KeepRules []predicateRule } -type predicatesRule struct { - predicates []filters.Predicate - sampling uint16 +type predicateRule struct { + predicate filters.Predicate + sampling uint16 } // Transform transforms a flow; if false is returned as a second argument, the entry is dropped @@ -55,7 +56,7 @@ func (f *Filter) Transform(entry config.GenericMap) (config.GenericMap, bool) { if len(f.KeepRules) > 0 { keep := false for _, r := range f.KeepRules { - if applyPredicates(outputEntry, r) { + if applyPredicate(outputEntry, r) { keep = true break } @@ -162,9 +163,9 @@ func applyRule(entry config.GenericMap, labels map[string]string, rule *api.Tran return !isRemoveEntrySatisfied(entry, rule.RemoveEntryAllSatisfied) case api.ConditionalSampling: return sample(entry, rule.ConditionalSampling) - case api.KeepEntryAllSatisfied: + case api.KeepEntryQuery: // This should be processed only in "applyPredicates". Failure to do so is a bug. 
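// (In practice this branch cannot be reached: NewTransformFilter moves keep_entry_query
// rules into f.KeepRules, and Transform keeps an entry as soon as any of those
// predicates matches, after the per-rule sampling roll in applyPredicate.)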
- tlog.Panicf("unexpected KeepEntryAllSatisfied: %v", rule) + tlog.Panicf("unexpected KeepEntryQuery: %v", rule) default: tlog.Panicf("unknown type %s for transform.Filter rule: %v", rule.Type, rule) } @@ -181,16 +182,11 @@ func isRemoveEntrySatisfied(entry config.GenericMap, rules []*api.RemoveEntryRul return true } -func applyPredicates(entry config.GenericMap, rule predicatesRule) bool { +func applyPredicate(entry config.GenericMap, rule predicateRule) bool { if !rollSampling(rule.sampling) { return false } - for _, p := range rule.predicates { - if !p(entry) { - return false - } - } - return true + return rule.predicate(entry) } func sample(entry config.GenericMap, rules []*api.SamplingCondition) bool { @@ -209,22 +205,21 @@ func rollSampling(value uint16) bool { // NewTransformFilter create a new filter transform func NewTransformFilter(params config.StageParam) (Transformer, error) { tlog.Debugf("entering NewTransformFilter") - keepRules := []predicatesRule{} + keepRules := []predicateRule{} rules := []api.TransformFilterRule{} if params.Transform != nil && params.Transform.Filter != nil { params.Transform.Filter.Preprocess() for i := range params.Transform.Filter.Rules { baseRules := ¶ms.Transform.Filter.Rules[i] - if baseRules.Type == api.KeepEntryAllSatisfied { - pr := predicatesRule{sampling: baseRules.KeepEntrySampling} - for _, keepRule := range baseRules.KeepEntryAllSatisfied { - pred, err := filters.FromKeepEntry(keepRule) - if err != nil { - return nil, err - } - pr.predicates = append(pr.predicates, pred) + if baseRules.Type == api.KeepEntryQuery { + predicate, err := dsl.Parse(baseRules.KeepEntryQuery) + if err != nil { + return nil, err } - keepRules = append(keepRules, pr) + keepRules = append(keepRules, predicateRule{ + sampling: baseRules.KeepEntrySampling, + predicate: predicate, + }) } else { rules = append(rules, *baseRules) } diff --git a/pkg/pipeline/transform/transform_filter_test.go b/pkg/pipeline/transform/transform_filter_test.go index be214ab36..8d4fccb16 100644 --- a/pkg/pipeline/transform/transform_filter_test.go +++ b/pkg/pipeline/transform/transform_filter_test.go @@ -676,34 +676,8 @@ func Test_Transform_KeepEntry(t *testing.T) { newFilter := api.TransformFilter{ Rules: []api.TransformFilterRule{ { - Type: api.KeepEntryAllSatisfied, - KeepEntryAllSatisfied: []*api.KeepEntryRule{ - { - Type: api.KeepEntryIfEqual, - KeepEntry: &api.TransformFilterGenericRule{ - Input: "namespace", - Value: "A", - }, - }, - { - Type: api.KeepEntryIfExists, - KeepEntry: &api.TransformFilterGenericRule{ - Input: "workload", - }, - }, - }, - }, - { - Type: api.KeepEntryAllSatisfied, - KeepEntryAllSatisfied: []*api.KeepEntryRule{ - { - Type: api.KeepEntryIfRegexMatch, - KeepEntry: &api.TransformFilterGenericRule{ - Input: "service", - Value: "abc.+", - }, - }, - }, + Type: api.KeepEntryQuery, + KeepEntryQuery: `(namespace="A" and with(workload)) or service=~"abc.+"`, }, }, } @@ -742,29 +716,13 @@ func Test_Transform_KeepEntrySampling(t *testing.T) { newFilter := api.TransformFilter{ Rules: []api.TransformFilterRule{ { - Type: api.KeepEntryAllSatisfied, - KeepEntryAllSatisfied: []*api.KeepEntryRule{ - { - Type: api.KeepEntryIfEqual, - KeepEntry: &api.TransformFilterGenericRule{ - Input: "namespace", - Value: "A", - }, - }, - }, + Type: api.KeepEntryQuery, + KeepEntryQuery: `namespace="A"`, KeepEntrySampling: 10, }, { - Type: api.KeepEntryAllSatisfied, - KeepEntryAllSatisfied: []*api.KeepEntryRule{ - { - Type: api.KeepEntryIfEqual, - KeepEntry: &api.TransformFilterGenericRule{ - 
Input: "namespace", - Value: "B", - }, - }, - }, + Type: api.KeepEntryQuery, + KeepEntryQuery: `namespace="B"`, }, }, } diff --git a/pkg/utils/filters/filters.go b/pkg/utils/filters/filters.go index 8bb12fcb6..61db4b6d3 100644 --- a/pkg/utils/filters/filters.go +++ b/pkg/utils/filters/filters.go @@ -1,11 +1,9 @@ package filters import ( - "fmt" "regexp" "strings" - "github.com/netobserv/flowlogs-pipeline/pkg/api" "github.com/netobserv/flowlogs-pipeline/pkg/config" "github.com/netobserv/flowlogs-pipeline/pkg/utils" ) @@ -113,41 +111,3 @@ func injectVars(flow config.GenericMap, filterValue string, varLookups [][]strin } return injected } - -func FromKeepEntry(from *api.KeepEntryRule) (Predicate, error) { - switch from.Type { - case api.KeepEntryIfExists: - return Presence(from.KeepEntry.Input), nil - case api.KeepEntryIfDoesntExist: - return Absence(from.KeepEntry.Input), nil - case api.KeepEntryIfEqual: - return Equal(from.KeepEntry.Input, from.KeepEntry.Value, true), nil - case api.KeepEntryIfNotEqual: - return NotEqual(from.KeepEntry.Input, from.KeepEntry.Value, true), nil - case api.KeepEntryIfRegexMatch: - if r, err := compileRegex(from.KeepEntry); err != nil { - return nil, err - } else { - return Regex(from.KeepEntry.Input, r), nil - } - case api.KeepEntryIfNotRegexMatch: - if r, err := compileRegex(from.KeepEntry); err != nil { - return nil, err - } else { - return NotRegex(from.KeepEntry.Input, r), nil - } - } - return nil, fmt.Errorf("keep entry rule type not recognized: %s", from.Type) -} - -func compileRegex(from *api.TransformFilterGenericRule) (*regexp.Regexp, error) { - s, ok := from.Value.(string) - if !ok { - return nil, fmt.Errorf("invalid regex keep rule: rule value must be a string [%v]", from) - } - r, err := regexp.Compile(s) - if err != nil { - return nil, fmt.Errorf("invalid regex keep rule: cannot compile regex [%w]", err) - } - return r, nil -} From 6781b9d43317fd6b512092224a6a85c6fce37ade Mon Sep 17 00:00:00 2001 From: Joel Takvorian Date: Tue, 22 Apr 2025 21:13:59 +0200 Subject: [PATCH 4/7] verbose errors --- pkg/dsl/eval_test.go | 4 ---- pkg/dsl/lexer.go | 1 + 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/pkg/dsl/eval_test.go b/pkg/dsl/eval_test.go index 67f45aa73..155a5fa95 100644 --- a/pkg/dsl/eval_test.go +++ b/pkg/dsl/eval_test.go @@ -8,7 +8,6 @@ import ( ) func TestAndOrEqual(t *testing.T) { - yyErrorVerbose = true predicate, err := Parse(`(srcnamespace="netobserv" OR (srcnamespace="ingress" AND dstnamespace="netobserv")) AND srckind!="service"`) assert.NoError(t, err) assert.NotNil(t, predicate) @@ -36,7 +35,6 @@ func TestAndOrEqual(t *testing.T) { } func TestRegexp(t *testing.T) { - yyErrorVerbose = true predicate, err := Parse(`srcnamespace=~"openshift.*" and dstnamespace!~"openshift.*"`) assert.NoError(t, err) assert.NotNil(t, predicate) @@ -71,7 +69,6 @@ func TestRegexp(t *testing.T) { } func TestWith(t *testing.T) { - yyErrorVerbose = true predicate, err := Parse(`srcnamespace="foo" and with(rtt)`) assert.NoError(t, err) assert.NotNil(t, predicate) @@ -89,7 +86,6 @@ func TestWith(t *testing.T) { } func TestWithout(t *testing.T) { - yyErrorVerbose = true predicate, err := Parse(`srcnamespace="foo" or without(srcnamespace)`) assert.NoError(t, err) assert.NotNil(t, predicate) diff --git a/pkg/dsl/lexer.go b/pkg/dsl/lexer.go index 5f3773f5a..a126e8aee 100644 --- a/pkg/dsl/lexer.go +++ b/pkg/dsl/lexer.go @@ -173,6 +173,7 @@ func (l *Lexer) Error(msg string) { func Parse(s string) (filters.Predicate, error) { l := new(Lexer) 
l.Init(strings.NewReader(s)) + yyErrorVerbose = true yyParse(l) if len(l.errs) > 0 { return nil, errors.Join(l.errs...) From c6200c622587b24aa4d546ddb772bf1bb4d0c09d Mon Sep 17 00:00:00 2001 From: Joel Takvorian Date: Wed, 23 Apr 2025 08:52:21 +0200 Subject: [PATCH 5/7] Add documentation and make target --- Makefile | 6 ++++ README.md | 10 ++++++ docs/filtering.md | 66 ++++++++++++++++++++++++++++++++++++++ pkg/dsl/expr.y | 14 ++++----- pkg/dsl/expr.y.go | 80 +++++++++++++++++++++++------------------------ pkg/dsl/lexer.go | 21 +++++++------ 6 files changed, 141 insertions(+), 56 deletions(-) create mode 100644 docs/filtering.md diff --git a/Makefile b/Makefile index 0cd56ac13..2a8f7060b 100644 --- a/Makefile +++ b/Makefile @@ -197,5 +197,11 @@ else DOCKER_BUILDKIT=1 $(OCI_BIN) manifest push ${IMAGE} docker://${IMAGE}; endif +.PHONY: goyacc +goyacc: ## Regenerate filters query langage + @echo "### Regenerate filters query langage" + GOFLAGS="" go install golang.org/x/tools/cmd/goyacc@v0.32.0 + goyacc -o pkg/dsl/expr.y.go pkg/dsl/expr.y + include .mk/development.mk include .mk/shortcuts.mk diff --git a/README.md b/README.md index 193e4e579..48b48fc8f 100644 --- a/README.md +++ b/README.md @@ -386,6 +386,16 @@ removal of only the `SrcPort` key and value Using `remove_entry_if_equal` will remove the entry if the specified field exists and is equal to the specified value. Using `remove_entry_if_not_equal` will remove the entry if the specified field exists and is not equal to the specified value. +#### Transform Filter: query language + +Alternatively, a query language allows to filter flows, keeping entries rather than removing them. + +``` +(srcnamespace="netobserv" OR (srcnamespace="ingress" AND dstnamespace="netobserv")) AND srckind!="service" +``` + +[See here](./docs/filtering.md) for more information about this language. + ### Transform Network `transform network` provides specific functionality that is useful for transformation of network flow-logs: diff --git a/docs/filtering.md b/docs/filtering.md new file mode 100644 index 000000000..8aa2821b1 --- /dev/null +++ b/docs/filtering.md @@ -0,0 +1,66 @@ +# FLP filtering language + +Flowlogs-pipeline uses a simple query language to filter network flows: + +``` +(srcnamespace="netobserv" OR (srcnamespace="ingress" AND dstnamespace="netobserv")) AND srckind!="service" +``` + +The syntax includes: + +- Logical boolean operators (case insensitive) + - `and` + - `or` +- String comparison operators + - equals `=` + - not equals `!=` + - matches regexp `=~` + - not matches regexp `!~` +- Unary operations + - field is present: `with(field)` + - field is absent: `without(field)` +- Parenthesis-based priority + +## API integration + +The language is currently integrated in the "keep_entry" transform/filtering API. Example: + +```yaml + transform: + type: filter + filter: + rules: + - type: keep_entry_query + keepEntryQuery: (namespace="A" and with(workload)) or service=~"abc.+" + keepEntrySampling: 10 # Optionally, a sampling ratio can be associated with the filter +``` + +## Integration with the NetObserv operator + +In the [NetObserv operator](https://github.com/netobserv/network-observability-operator), the filtering query language is used in `FlowCollector` `spec.processor.filters`. 
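At this level, queries are written against the enriched flow field names exported by the operator (for example `SrcK8S_Namespace` and `DstK8S_Namespace` below), rather than the short lowercase keys used in the unit tests.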
Example: + +```yaml +spec: + processor: + filters: + - query: | + (SrcK8S_Namespace="netobserv" OR (SrcK8S_Namespace="openshift-ingress" AND DstK8S_Namespace="netobserv")) + outputTarget: Loki # The filter can target a specific output (such as Loki logs or exported data), or all outputs. + sampling: 10 # Optionally, a sampling ratio can be associated with the filter +``` + +See also the [list of field names](https://github.com/netobserv/network-observability-operator/blob/main/docs/flows-format.adoc) that are available for queries, and the [API documentation](https://github.com/netobserv/network-observability-operator/blob/main/docs/FlowCollector.md#flowcollectorspecprocessorfiltersindex-1). + +## Internals + +This language is designed using [Yacc](https://en.wikipedia.org/wiki/Yacc) / goyacc. + +The [definition file](../pkg/dsl/expr.y) describes the syntax based on a list of tokens. It is derived to a [go source file](../pkg/dsl/expr.y.go) using [goyacc](https://pkg.go.dev/golang.org/x/tools/cmd/goyacc), which defines constants for the tokens, among other things. The [lexer](../pkg/dsl/lexer.go) file defines structures and helpers that can be used from `expr.y`, the logic used to interpret the language in a structured way, and is also where actual characters/strings are mapped to syntax tokens. Finally, [eval.go](../pkg/dsl/eval.go) runs the desired query on actual data. + +When adding features to the language, you'll likely have to change `expr.y` and `lexer.go`. + +To regenerate `expr.y.go`, run: + +```bash +make goyacc +``` diff --git a/pkg/dsl/expr.y b/pkg/dsl/expr.y index 7d7a1e15e..3f921cb53 100644 --- a/pkg/dsl/expr.y +++ b/pkg/dsl/expr.y @@ -10,7 +10,7 @@ package dsl %type root %type expr -%token VAR STRING NUMBER AND OR EQ NEQ REG NREG OPEN_PARENTHESIS CLOSE_PARENTHESIS WITH WITHOUT +%token NF_FIELD STRING NUMBER AND OR EQ NEQ REG NREG OPEN_PARENTHESIS CLOSE_PARENTHESIS WITH WITHOUT %left AND %left OR %% @@ -25,10 +25,10 @@ expr: OPEN_PARENTHESIS expr CLOSE_PARENTHESIS { $$ = ParenthesisExpr{inner: $2} } | expr AND expr { $$ = LogicalExpr{left: $1, operator: operatorAnd, right: $3} } | expr OR expr { $$ = LogicalExpr{left: $1, operator: operatorOr, right: $3} } - | WITH OPEN_PARENTHESIS VAR CLOSE_PARENTHESIS { $$ = WithExpr{key: $3} } - | WITHOUT OPEN_PARENTHESIS VAR CLOSE_PARENTHESIS { $$ = WithoutExpr{key: $3} } - | VAR EQ STRING { $$ = EqExpr{key: $1, value: $3} } - | VAR NEQ STRING { $$ = NEqExpr{key: $1, value: $3} } - | VAR REG STRING { $$ = RegExpr{key: $1, value: $3} } - | VAR NREG STRING { $$ = NRegExpr{key: $1, value: $3} } + | WITH OPEN_PARENTHESIS NF_FIELD CLOSE_PARENTHESIS { $$ = WithExpr{key: $3} } + | WITHOUT OPEN_PARENTHESIS NF_FIELD CLOSE_PARENTHESIS { $$ = WithoutExpr{key: $3} } + | NF_FIELD EQ STRING { $$ = EqExpr{key: $1, value: $3} } + | NF_FIELD NEQ STRING { $$ = NEqExpr{key: $1, value: $3} } + | NF_FIELD REG STRING { $$ = RegExpr{key: $1, value: $3} } + | NF_FIELD NREG STRING { $$ = NRegExpr{key: $1, value: $3} } %% diff --git a/pkg/dsl/expr.y.go b/pkg/dsl/expr.y.go index bd79c76f2..bb875ea79 100644 --- a/pkg/dsl/expr.y.go +++ b/pkg/dsl/expr.y.go @@ -14,7 +14,7 @@ type yySymType struct { value string } -const VAR = 57346 +const NF_FIELD = 57346 const STRING = 57347 const NUMBER = 57348 const AND = 57349 @@ -32,7 +32,7 @@ var yyToknames = [...]string{ "$end", "error", "$unk", - "VAR", + "NF_FIELD", "STRING", "NUMBER", "AND", @@ -56,7 +56,7 @@ const yyInitialStackSize = 16 //line pkg/dsl/expr.y:34 //line yacctab:1 -var yyExca = [...]int{ +var yyExca = 
[...]int8{ -1, 1, 1, -1, -2, 0, @@ -66,54 +66,54 @@ const yyPrivate = 57344 const yyLast = 29 -var yyAct = [...]int{ +var yyAct = [...]int8{ 6, 7, 8, 26, 12, 13, 14, 15, 18, 3, 11, 4, 5, 25, 10, 2, 7, 8, 8, 9, 24, 23, 22, 16, 17, 21, 20, 19, 1, } -var yyPact = [...]int{ +var yyPact = [...]int16{ -4, -1000, 9, -4, 1, -3, -5, -4, -4, -6, 23, 22, 20, 17, 16, 15, 10, -1000, -1000, -1, -11, -1000, -1000, -1000, -1000, -1000, -1000, } -var yyPgo = [...]int{ +var yyPgo = [...]int8{ 0, 28, 15, } -var yyR1 = [...]int{ +var yyR1 = [...]int8{ 0, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, } -var yyR2 = [...]int{ +var yyR2 = [...]int8{ 0, 1, 3, 3, 3, 4, 4, 3, 3, 3, 3, } -var yyChk = [...]int{ +var yyChk = [...]int16{ -1000, -1, -2, 13, 15, 16, 4, 7, 8, -2, 13, 13, 9, 10, 11, 12, -2, -2, 14, 4, 4, 5, 5, 5, 5, 14, 14, } -var yyDef = [...]int{ +var yyDef = [...]int8{ 0, -2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 4, 2, 0, 0, 7, 8, 9, 10, 5, 6, } -var yyTok1 = [...]int{ +var yyTok1 = [...]int8{ 1, } -var yyTok2 = [...]int{ +var yyTok2 = [...]int8{ 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, } -var yyTok3 = [...]int{ +var yyTok3 = [...]int8{ 0, } @@ -195,9 +195,9 @@ func yyErrorMessage(state, lookAhead int) string { expected := make([]int, 0, 4) // Look for shiftable tokens. - base := yyPact[state] + base := int(yyPact[state]) for tok := TOKSTART; tok-1 < len(yyToknames); tok++ { - if n := base + tok; n >= 0 && n < yyLast && yyChk[yyAct[n]] == tok { + if n := base + tok; n >= 0 && n < yyLast && int(yyChk[int(yyAct[n])]) == tok { if len(expected) == cap(expected) { return res } @@ -207,13 +207,13 @@ func yyErrorMessage(state, lookAhead int) string { if yyDef[state] == -2 { i := 0 - for yyExca[i] != -1 || yyExca[i+1] != state { + for yyExca[i] != -1 || int(yyExca[i+1]) != state { i += 2 } // Look for tokens that we accept or reduce. 
for i += 2; yyExca[i] >= 0; i += 2 { - tok := yyExca[i] + tok := int(yyExca[i]) if tok < TOKSTART || yyExca[i+1] == 0 { continue } @@ -244,30 +244,30 @@ func yylex1(lex yyLexer, lval *yySymType) (char, token int) { token = 0 char = lex.Lex(lval) if char <= 0 { - token = yyTok1[0] + token = int(yyTok1[0]) goto out } if char < len(yyTok1) { - token = yyTok1[char] + token = int(yyTok1[char]) goto out } if char >= yyPrivate { if char < yyPrivate+len(yyTok2) { - token = yyTok2[char-yyPrivate] + token = int(yyTok2[char-yyPrivate]) goto out } } for i := 0; i < len(yyTok3); i += 2 { - token = yyTok3[i+0] + token = int(yyTok3[i+0]) if token == char { - token = yyTok3[i+1] + token = int(yyTok3[i+1]) goto out } } out: if token == 0 { - token = yyTok2[1] /* unknown char */ + token = int(yyTok2[1]) /* unknown char */ } if yyDebug >= 3 { __yyfmt__.Printf("lex %s(%d)\n", yyTokname(token), uint(char)) @@ -322,7 +322,7 @@ yystack: yyS[yyp].yys = yystate yynewstate: - yyn = yyPact[yystate] + yyn = int(yyPact[yystate]) if yyn <= yyFlag { goto yydefault /* simple state */ } @@ -333,8 +333,8 @@ yynewstate: if yyn < 0 || yyn >= yyLast { goto yydefault } - yyn = yyAct[yyn] - if yyChk[yyn] == yytoken { /* valid shift */ + yyn = int(yyAct[yyn]) + if int(yyChk[yyn]) == yytoken { /* valid shift */ yyrcvr.char = -1 yytoken = -1 yyVAL = yyrcvr.lval @@ -347,7 +347,7 @@ yynewstate: yydefault: /* default state action */ - yyn = yyDef[yystate] + yyn = int(yyDef[yystate]) if yyn == -2 { if yyrcvr.char < 0 { yyrcvr.char, yytoken = yylex1(yylex, &yyrcvr.lval) @@ -356,18 +356,18 @@ yydefault: /* look through exception table */ xi := 0 for { - if yyExca[xi+0] == -1 && yyExca[xi+1] == yystate { + if yyExca[xi+0] == -1 && int(yyExca[xi+1]) == yystate { break } xi += 2 } for xi += 2; ; xi += 2 { - yyn = yyExca[xi+0] + yyn = int(yyExca[xi+0]) if yyn < 0 || yyn == yytoken { break } } - yyn = yyExca[xi+1] + yyn = int(yyExca[xi+1]) if yyn < 0 { goto ret0 } @@ -389,10 +389,10 @@ yydefault: /* find a state where "error" is a legal shift action */ for yyp >= 0 { - yyn = yyPact[yyS[yyp].yys] + yyErrCode + yyn = int(yyPact[yyS[yyp].yys]) + yyErrCode if yyn >= 0 && yyn < yyLast { - yystate = yyAct[yyn] /* simulate a shift of "error" */ - if yyChk[yystate] == yyErrCode { + yystate = int(yyAct[yyn]) /* simulate a shift of "error" */ + if int(yyChk[yystate]) == yyErrCode { goto yystack } } @@ -428,7 +428,7 @@ yydefault: yypt := yyp _ = yypt // guard against "declared and not used" - yyp -= yyR2[yyn] + yyp -= int(yyR2[yyn]) // yyp is now the index of $0. Perform the default action. Iff the // reduced production is ε, $1 is possibly out of range. 
if yyp+1 >= len(yyS) { @@ -439,16 +439,16 @@ yydefault: yyVAL = yyS[yyp+1] /* consult goto table to find next state */ - yyn = yyR1[yyn] - yyg := yyPgo[yyn] + yyn = int(yyR1[yyn]) + yyg := int(yyPgo[yyn]) yyj := yyg + yyS[yyp].yys + 1 if yyj >= yyLast { - yystate = yyAct[yyg] + yystate = int(yyAct[yyg]) } else { - yystate = yyAct[yyj] - if yyChk[yystate] != -yyn { - yystate = yyAct[yyg] + yystate = int(yyAct[yyj]) + if int(yyChk[yystate]) != -yyn { + yystate = int(yyAct[yyg]) } } // dummy call; replaced with literal code diff --git a/pkg/dsl/lexer.go b/pkg/dsl/lexer.go index a126e8aee..9fe907551 100644 --- a/pkg/dsl/lexer.go +++ b/pkg/dsl/lexer.go @@ -16,7 +16,7 @@ const ( operatorAnd = "and" ) -var tokens = map[string]int{ +var syntaxTokens = map[string]int{ "=": EQ, "!=": NEQ, "=~": REG, @@ -135,35 +135,38 @@ func (l *Lexer) Lex(lval *yySymType) int { } tokenText := l.TokenText() lval.value = tokenText - switch token { + switch token { case scanner.Int, scanner.Float: + // Reading arbitrary number return NUMBER - case scanner.String, scanner.RawString: + // Reading arbitrary double-quotes delimited string var err error - lval.value, err = strutil.Unquote(tokenText) if err != nil { l.Error(err.Error()) - return 0 } - return STRING } + // Check if this is a syntaxToken + + // Some characters are read as a token, such as "=", regardless of what follows + // To read "=~" as a token, we need to Peek next rune manually tokenNext := tokenText + string(l.Peek()) - if tok, ok := tokens[tokenNext]; ok { + if tok, ok := syntaxTokens[tokenNext]; ok { l.Next() return tok } - if tok, ok := tokens[strings.ToLower(tokenText)]; ok { + if tok, ok := syntaxTokens[strings.ToLower(tokenText)]; ok { return tok } - return VAR + // When none of the above returned, this must be a NetFlow field name + return NF_FIELD } func (l *Lexer) Error(msg string) { From 1f1b76080d68b9cd93078d829418a673121aec52 Mon Sep 17 00:00:00 2001 From: Joel Takvorian Date: Wed, 23 Apr 2025 12:39:52 +0200 Subject: [PATCH 6/7] Manage numeric comparisons --- README.md | 1 + docs/filtering.md | 4 +- pkg/dsl/eval_test.go | 40 ++++++++++ pkg/dsl/expr.y | 8 +- pkg/dsl/expr.y.go | 119 +++++++++++++++++++----------- pkg/dsl/lexer.go | 90 +++++++++++++++------- pkg/utils/filters/filters.go | 27 +++++++ pkg/utils/filters/filters_test.go | 38 ++++++++++ 8 files changed, 254 insertions(+), 73 deletions(-) diff --git a/README.md b/README.md index 48b48fc8f..b67c1f176 100644 --- a/README.md +++ b/README.md @@ -955,6 +955,7 @@ Images image-push Push MULTIARCH_TARGETS images manifest-build Build MULTIARCH_TARGETS manifest manifest-push Push MULTIARCH_TARGETS manifest + goyacc Regenerate filters query langage kubernetes deploy Deploy the image diff --git a/docs/filtering.md b/docs/filtering.md index 8aa2821b1..1c13ddb64 100644 --- a/docs/filtering.md +++ b/docs/filtering.md @@ -11,11 +11,13 @@ The syntax includes: - Logical boolean operators (case insensitive) - `and` - `or` -- String comparison operators +- Comparison operators - equals `=` - not equals `!=` - matches regexp `=~` - not matches regexp `!~` + - greater than `>` + - less than `<` - Unary operations - field is present: `with(field)` - field is absent: `without(field)` diff --git a/pkg/dsl/eval_test.go b/pkg/dsl/eval_test.go index 155a5fa95..629a04798 100644 --- a/pkg/dsl/eval_test.go +++ b/pkg/dsl/eval_test.go @@ -103,3 +103,43 @@ func TestWithout(t *testing.T) { }) assert.False(t, result, "Should reject flows from bar") } + +func TestNumeric(t *testing.T) { + predicate, err := 
Parse(`flowdirection=0 and bytes > 15`) + assert.NoError(t, err) + assert.NotNil(t, predicate) + + result := predicate(config.GenericMap{ + "srcnamespace": "plop", + "flowdirection": 0, + "bytes": 20, + }) + assert.True(t, result) + + result = predicate(config.GenericMap{ + "srcnamespace": "plop", + "flowdirection": int16(0), + "bytes": int16(20), + }) + assert.True(t, result) + + result = predicate(config.GenericMap{ + "srcnamespace": "plop", + "flowdirection": 1, + "bytes": 20, + }) + assert.False(t, result) + + result = predicate(config.GenericMap{ + "srcnamespace": "plop", + "flowdirection": 0, + "bytes": 10, + }) + assert.False(t, result) + + result = predicate(config.GenericMap{ + "srcnamespace": "plop", + "bytes": 20, + }) + assert.False(t, result) +} diff --git a/pkg/dsl/expr.y b/pkg/dsl/expr.y index 3f921cb53..81810d96f 100644 --- a/pkg/dsl/expr.y +++ b/pkg/dsl/expr.y @@ -5,12 +5,14 @@ package dsl %union{ expr Expression value string + intValue int } %type root %type expr -%token NF_FIELD STRING NUMBER AND OR EQ NEQ REG NREG OPEN_PARENTHESIS CLOSE_PARENTHESIS WITH WITHOUT +%token NF_FIELD STRING AND OR EQ NEQ GT LT REG NREG OPEN_PARENTHESIS CLOSE_PARENTHESIS WITH WITHOUT +%token NUMBER %left AND %left OR %% @@ -29,6 +31,10 @@ expr: | WITHOUT OPEN_PARENTHESIS NF_FIELD CLOSE_PARENTHESIS { $$ = WithoutExpr{key: $3} } | NF_FIELD EQ STRING { $$ = EqExpr{key: $1, value: $3} } | NF_FIELD NEQ STRING { $$ = NEqExpr{key: $1, value: $3} } + | NF_FIELD EQ NUMBER { $$ = EqNumExpr{key: $1, value: $3} } + | NF_FIELD NEQ NUMBER { $$ = NEqNumExpr{key: $1, value: $3} } + | NF_FIELD LT NUMBER { $$ = LessThanExpr{key: $1, value: $3} } + | NF_FIELD GT NUMBER { $$ = GreaterThanExpr{key: $1, value: $3} } | NF_FIELD REG STRING { $$ = RegExpr{key: $1, value: $3} } | NF_FIELD NREG STRING { $$ = NRegExpr{key: $1, value: $3} } %% diff --git a/pkg/dsl/expr.y.go b/pkg/dsl/expr.y.go index bb875ea79..a1b49408a 100644 --- a/pkg/dsl/expr.y.go +++ b/pkg/dsl/expr.y.go @@ -9,24 +9,27 @@ import __yyfmt__ "fmt" //line pkg/dsl/expr.y:5 type yySymType struct { - yys int - expr Expression - value string + yys int + expr Expression + value string + intValue int } const NF_FIELD = 57346 const STRING = 57347 -const NUMBER = 57348 -const AND = 57349 -const OR = 57350 -const EQ = 57351 -const NEQ = 57352 -const REG = 57353 -const NREG = 57354 -const OPEN_PARENTHESIS = 57355 -const CLOSE_PARENTHESIS = 57356 -const WITH = 57357 -const WITHOUT = 57358 +const AND = 57348 +const OR = 57349 +const EQ = 57350 +const NEQ = 57351 +const GT = 57352 +const LT = 57353 +const REG = 57354 +const NREG = 57355 +const OPEN_PARENTHESIS = 57356 +const CLOSE_PARENTHESIS = 57357 +const WITH = 57358 +const WITHOUT = 57359 +const NUMBER = 57360 var yyToknames = [...]string{ "$end", @@ -34,17 +37,19 @@ var yyToknames = [...]string{ "$unk", "NF_FIELD", "STRING", - "NUMBER", "AND", "OR", "EQ", "NEQ", + "GT", + "LT", "REG", "NREG", "OPEN_PARENTHESIS", "CLOSE_PARENTHESIS", "WITH", "WITHOUT", + "NUMBER", } var yyStatenames = [...]string{} @@ -53,7 +58,7 @@ const yyEofCode = 1 const yyErrCode = 2 const yyInitialStackSize = 16 -//line pkg/dsl/expr.y:34 +//line pkg/dsl/expr.y:40 //line yacctab:1 var yyExca = [...]int8{ @@ -64,44 +69,48 @@ var yyExca = [...]int8{ const yyPrivate = 57344 -const yyLast = 29 +const yyLast = 35 var yyAct = [...]int8{ - 6, 7, 8, 26, 12, 13, 14, 15, 18, 3, - 11, 4, 5, 25, 10, 2, 7, 8, 8, 9, - 24, 23, 22, 16, 17, 21, 20, 19, 1, + 6, 28, 25, 23, 27, 7, 8, 32, 31, 11, + 3, 8, 4, 5, 20, 26, 24, 12, 13, 15, + 14, 16, 17, 2, 10, 7, 8, 9, 
30, 29, + 22, 18, 19, 21, 1, } var yyPact = [...]int16{ - -4, -1000, 9, -4, 1, -3, -5, -4, -4, -6, - 23, 22, 20, 17, 16, 15, 10, -1000, -1000, -1, - -11, -1000, -1000, -1000, -1000, -1000, -1000, + -4, -1000, 19, -4, 10, -5, 9, -4, -4, -1, + 29, 26, -2, -3, -14, -17, 24, 23, 4, -1000, + -1000, -7, -8, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, } var yyPgo = [...]int8{ - 0, 28, 15, + 0, 34, 23, } var yyR1 = [...]int8{ 0, 1, 2, 2, 2, 2, 2, 2, 2, 2, - 2, + 2, 2, 2, 2, 2, } var yyR2 = [...]int8{ 0, 1, 3, 3, 3, 4, 4, 3, 3, 3, - 3, + 3, 3, 3, 3, 3, } var yyChk = [...]int16{ - -1000, -1, -2, 13, 15, 16, 4, 7, 8, -2, - 13, 13, 9, 10, 11, 12, -2, -2, 14, 4, - 4, 5, 5, 5, 5, 14, 14, + -1000, -1, -2, 14, 16, 17, 4, 6, 7, -2, + 14, 14, 8, 9, 11, 10, 12, 13, -2, -2, + 15, 4, 4, 5, 18, 5, 18, 18, 18, 5, + 5, 15, 15, } var yyDef = [...]int8{ 0, -2, 1, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 3, 4, 2, 0, - 0, 7, 8, 9, 10, 5, 6, + 0, 0, 0, 0, 0, 0, 0, 0, 3, 4, + 2, 0, 0, 7, 9, 8, 10, 11, 12, 13, + 14, 5, 6, } var yyTok1 = [...]int8{ @@ -110,7 +119,7 @@ var yyTok1 = [...]int8{ var yyTok2 = [...]int8{ 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, - 12, 13, 14, 15, 16, + 12, 13, 14, 15, 16, 17, 18, } var yyTok3 = [...]int8{ @@ -456,62 +465,86 @@ yydefault: case 1: yyDollar = yyS[yypt-1 : yypt+1] -//line pkg/dsl/expr.y:19 +//line pkg/dsl/expr.y:21 { yyVAL.expr = yyDollar[1].expr yylex.(*Lexer).result = yyVAL.expr } case 2: yyDollar = yyS[yypt-3 : yypt+1] -//line pkg/dsl/expr.y:25 +//line pkg/dsl/expr.y:27 { yyVAL.expr = ParenthesisExpr{inner: yyDollar[2].expr} } case 3: yyDollar = yyS[yypt-3 : yypt+1] -//line pkg/dsl/expr.y:26 +//line pkg/dsl/expr.y:28 { yyVAL.expr = LogicalExpr{left: yyDollar[1].expr, operator: operatorAnd, right: yyDollar[3].expr} } case 4: yyDollar = yyS[yypt-3 : yypt+1] -//line pkg/dsl/expr.y:27 +//line pkg/dsl/expr.y:29 { yyVAL.expr = LogicalExpr{left: yyDollar[1].expr, operator: operatorOr, right: yyDollar[3].expr} } case 5: yyDollar = yyS[yypt-4 : yypt+1] -//line pkg/dsl/expr.y:28 +//line pkg/dsl/expr.y:30 { yyVAL.expr = WithExpr{key: yyDollar[3].value} } case 6: yyDollar = yyS[yypt-4 : yypt+1] -//line pkg/dsl/expr.y:29 +//line pkg/dsl/expr.y:31 { yyVAL.expr = WithoutExpr{key: yyDollar[3].value} } case 7: yyDollar = yyS[yypt-3 : yypt+1] -//line pkg/dsl/expr.y:30 +//line pkg/dsl/expr.y:32 { yyVAL.expr = EqExpr{key: yyDollar[1].value, value: yyDollar[3].value} } case 8: yyDollar = yyS[yypt-3 : yypt+1] -//line pkg/dsl/expr.y:31 +//line pkg/dsl/expr.y:33 { yyVAL.expr = NEqExpr{key: yyDollar[1].value, value: yyDollar[3].value} } case 9: yyDollar = yyS[yypt-3 : yypt+1] -//line pkg/dsl/expr.y:32 +//line pkg/dsl/expr.y:34 { - yyVAL.expr = RegExpr{key: yyDollar[1].value, value: yyDollar[3].value} + yyVAL.expr = EqNumExpr{key: yyDollar[1].value, value: yyDollar[3].intValue} } case 10: yyDollar = yyS[yypt-3 : yypt+1] -//line pkg/dsl/expr.y:33 +//line pkg/dsl/expr.y:35 + { + yyVAL.expr = NEqNumExpr{key: yyDollar[1].value, value: yyDollar[3].intValue} + } + case 11: + yyDollar = yyS[yypt-3 : yypt+1] +//line pkg/dsl/expr.y:36 + { + yyVAL.expr = LessThanExpr{key: yyDollar[1].value, value: yyDollar[3].intValue} + } + case 12: + yyDollar = yyS[yypt-3 : yypt+1] +//line pkg/dsl/expr.y:37 + { + yyVAL.expr = GreaterThanExpr{key: yyDollar[1].value, value: yyDollar[3].intValue} + } + case 13: + yyDollar = yyS[yypt-3 : yypt+1] +//line pkg/dsl/expr.y:38 + { + yyVAL.expr = RegExpr{key: yyDollar[1].value, value: yyDollar[3].value} + } + case 14: + yyDollar = yyS[yypt-3 : yypt+1] +//line 
pkg/dsl/expr.y:39 { yyVAL.expr = NRegExpr{key: yyDollar[1].value, value: yyDollar[3].value} } diff --git a/pkg/dsl/lexer.go b/pkg/dsl/lexer.go index 9fe907551..a234e2c12 100644 --- a/pkg/dsl/lexer.go +++ b/pkg/dsl/lexer.go @@ -4,6 +4,7 @@ import ( "errors" "fmt" "regexp" + "strconv" "strings" "text/scanner" @@ -21,6 +22,8 @@ var syntaxTokens = map[string]int{ "!=": NEQ, "=~": REG, "!~": NREG, + ">": GT, + "<": LT, operatorOr: OR, operatorAnd: AND, "with": WITH, @@ -37,68 +40,90 @@ type ParenthesisExpr struct { inner Expression } -func (pe ParenthesisExpr) toTree() (*tree, error) { - return pe.inner.toTree() +func (e ParenthesisExpr) toTree() (*tree, error) { + return e.inner.toTree() } -type EqExpr struct { +type kvPair struct { key string value string } -func (ee EqExpr) toTree() (*tree, error) { - return &tree{predicate: filters.Equal(ee.key, ee.value, false)}, nil +type kvPairInt struct { + key string + value int } -type NEqExpr struct { - key string - value string +type EqExpr kvPair + +func (e EqExpr) toTree() (*tree, error) { + return &tree{predicate: filters.Equal(e.key, e.value, false)}, nil } -func (ne NEqExpr) toTree() (*tree, error) { - return &tree{predicate: filters.NotEqual(ne.key, ne.value, false)}, nil +type NEqExpr kvPair + +func (e NEqExpr) toTree() (*tree, error) { + return &tree{predicate: filters.NotEqual(e.key, e.value, false)}, nil } -type RegExpr struct { - key string - value string +type EqNumExpr kvPairInt + +func (e EqNumExpr) toTree() (*tree, error) { + return &tree{predicate: filters.NumEquals(e.key, e.value)}, nil +} + +type NEqNumExpr kvPairInt + +func (e NEqNumExpr) toTree() (*tree, error) { + return &tree{predicate: filters.NumNotEquals(e.key, e.value)}, nil +} + +type LessThanExpr kvPairInt + +func (e LessThanExpr) toTree() (*tree, error) { + return &tree{predicate: filters.LessThan(e.key, e.value)}, nil +} + +type GreaterThanExpr kvPairInt + +func (e GreaterThanExpr) toTree() (*tree, error) { + return &tree{predicate: filters.GreaterThan(e.key, e.value)}, nil } -func (re RegExpr) toTree() (*tree, error) { - r, err := regexp.Compile(re.value) +type RegExpr kvPair + +func (e RegExpr) toTree() (*tree, error) { + r, err := regexp.Compile(e.value) if err != nil { return nil, fmt.Errorf("invalid regex filter: cannot compile regex [%w]", err) } - return &tree{predicate: filters.Regex(re.key, r)}, nil + return &tree{predicate: filters.Regex(e.key, r)}, nil } -type NRegExpr struct { - key string - value string -} +type NRegExpr kvPair -func (re NRegExpr) toTree() (*tree, error) { - r, err := regexp.Compile(re.value) +func (e NRegExpr) toTree() (*tree, error) { + r, err := regexp.Compile(e.value) if err != nil { return nil, fmt.Errorf("invalid regex filter: cannot compile regex [%w]", err) } - return &tree{predicate: filters.NotRegex(re.key, r)}, nil + return &tree{predicate: filters.NotRegex(e.key, r)}, nil } type WithExpr struct { key string } -func (we WithExpr) toTree() (*tree, error) { - return &tree{predicate: filters.Presence(we.key)}, nil +func (e WithExpr) toTree() (*tree, error) { + return &tree{predicate: filters.Presence(e.key)}, nil } type WithoutExpr struct { key string } -func (we WithoutExpr) toTree() (*tree, error) { - return &tree{predicate: filters.Absence(we.key)}, nil +func (e WithoutExpr) toTree() (*tree, error) { + return &tree{predicate: filters.Absence(e.key)}, nil } type LogicalExpr struct { @@ -137,9 +162,18 @@ func (l *Lexer) Lex(lval *yySymType) int { lval.value = tokenText switch token { - case scanner.Int, scanner.Float: + case 
scanner.Int: // Reading arbitrary number + res, err := strconv.ParseInt(tokenText, 10, 64) + if err != nil { + l.Error(err.Error()) + return 0 + } + lval.intValue = int(res) return NUMBER + case scanner.Float: + l.Error("Float values are currently unsupported") + return 0 case scanner.String, scanner.RawString: // Reading arbitrary double-quotes delimited string var err error diff --git a/pkg/utils/filters/filters.go b/pkg/utils/filters/filters.go index 61db4b6d3..5643e41f6 100644 --- a/pkg/utils/filters/filters.go +++ b/pkg/utils/filters/filters.go @@ -67,6 +67,33 @@ func NotEqual(key string, filterValue any, convertString bool) Predicate { return func(flow config.GenericMap) bool { return !pred(flow) } } +func NumEquals(key string, filterValue int) Predicate { + return castIntAndCheck(key, func(i int) bool { return i == filterValue }) +} + +func NumNotEquals(key string, filterValue int) Predicate { + return castIntAndCheck(key, func(i int) bool { return i != filterValue }) +} + +func LessThan(key string, filterValue int) Predicate { + return castIntAndCheck(key, func(i int) bool { return i < filterValue }) +} + +func GreaterThan(key string, filterValue int) Predicate { + return castIntAndCheck(key, func(i int) bool { return i > filterValue }) +} + +func castIntAndCheck(key string, check func(int) bool) Predicate { + return func(flow config.GenericMap) bool { + if val, found := flow[key]; found { + if cast, err := utils.ConvertToInt(val); err == nil { + return check(cast) + } + } + return false + } +} + func Regex(key string, filterRegex *regexp.Regexp) Predicate { return func(flow config.GenericMap) bool { if val, found := flow[key]; found { diff --git a/pkg/utils/filters/filters_test.go b/pkg/utils/filters/filters_test.go index 71baa29a0..c7011c54c 100644 --- a/pkg/utils/filters/filters_test.go +++ b/pkg/utils/filters/filters_test.go @@ -12,6 +12,7 @@ var flow = config.GenericMap{ "namespace": "foo", "name": "bar", "bytes": 15, + "int32_bytes": int32(15), "other_namespace": "foo", } @@ -128,3 +129,40 @@ func Test_Filters_extractVarLookups(t *testing.T) { variables = extractVarLookups("") assert.Empty(t, variables) } + +func TestFilterNumeric(t *testing.T) { + // LessThan + pred := LessThan("bytes", 20) + assert.True(t, pred(flow)) + + pred = LessThan("bytes", 10) + assert.False(t, pred(flow)) + + // int32 + pred = LessThan("int32_bytes", 20) + assert.True(t, pred(flow)) + + pred = LessThan("int32_bytes", 10) + assert.False(t, pred(flow)) + + // GreaterThan + pred = GreaterThan("bytes", 20) + assert.False(t, pred(flow)) + + pred = GreaterThan("bytes", 10) + assert.True(t, pred(flow)) + + // NumEquals + pred = NumEquals("bytes", 15) + assert.True(t, pred(flow)) + + pred = NumEquals("bytes", 10) + assert.False(t, pred(flow)) + + // NumNotEquals + pred = NumNotEquals("bytes", 10) + assert.True(t, pred(flow)) + + pred = NumNotEquals("bytes", 15) + assert.False(t, pred(flow)) +} From 655106acdcdf4e7a303e4280ef28fe272a6a93d6 Mon Sep 17 00:00:00 2001 From: Joel Takvorian Date: Wed, 23 Apr 2025 13:17:54 +0200 Subject: [PATCH 7/7] Numeric filters: manage >=/<= --- docs/filtering.md | 4 +- pkg/dsl/eval_test.go | 28 +++++++++++++- pkg/dsl/expr.y | 4 +- pkg/dsl/expr.y.go | 74 ++++++++++++++++++++++-------------- pkg/dsl/lexer.go | 14 +++++++ pkg/utils/filters/filters.go | 8 ++++ 6 files changed, 99 insertions(+), 33 deletions(-) diff --git a/docs/filtering.md b/docs/filtering.md index 1c13ddb64..abc64cd8c 100644 --- a/docs/filtering.md +++ b/docs/filtering.md @@ -16,8 +16,8 @@ The syntax includes: - 
not equals `!=` - matches regexp `=~` - not matches regexp `!~` - - greater than `>` - - less than `<` + - greater than (or equal) `>` / `>=` + - less than (or equal) `<` / `<=` - Unary operations - field is present: `with(field)` - field is absent: `without(field)` diff --git a/pkg/dsl/eval_test.go b/pkg/dsl/eval_test.go index 629a04798..22113cccd 100644 --- a/pkg/dsl/eval_test.go +++ b/pkg/dsl/eval_test.go @@ -105,7 +105,7 @@ func TestWithout(t *testing.T) { } func TestNumeric(t *testing.T) { - predicate, err := Parse(`flowdirection=0 and bytes > 15`) + predicate, err := Parse(`flowdirection=0 and bytes > 15 and packets <= 2`) assert.NoError(t, err) assert.NotNil(t, predicate) @@ -113,6 +113,7 @@ func TestNumeric(t *testing.T) { "srcnamespace": "plop", "flowdirection": 0, "bytes": 20, + "packets": 1, }) assert.True(t, result) @@ -120,6 +121,7 @@ func TestNumeric(t *testing.T) { "srcnamespace": "plop", "flowdirection": int16(0), "bytes": int16(20), + "packets": int16(1), }) assert.True(t, result) @@ -127,6 +129,7 @@ func TestNumeric(t *testing.T) { "srcnamespace": "plop", "flowdirection": 1, "bytes": 20, + "packets": 1, }) assert.False(t, result) @@ -134,12 +137,35 @@ func TestNumeric(t *testing.T) { "srcnamespace": "plop", "flowdirection": 0, "bytes": 10, + "packets": 1, }) assert.False(t, result) result = predicate(config.GenericMap{ "srcnamespace": "plop", "bytes": 20, + "packets": 1, + }) + assert.False(t, result) + + result = predicate(config.GenericMap{ + "flowdirection": 0, + "bytes": 20, + "packets": 2, + }) + assert.True(t, result) + + result = predicate(config.GenericMap{ + "flowdirection": 0, + "bytes": 15, + "packets": 2, + }) + assert.False(t, result) + + result = predicate(config.GenericMap{ + "flowdirection": 0, + "bytes": 20, + "packets": 3, }) assert.False(t, result) } diff --git a/pkg/dsl/expr.y b/pkg/dsl/expr.y index 81810d96f..e01a9e7a6 100644 --- a/pkg/dsl/expr.y +++ b/pkg/dsl/expr.y @@ -11,7 +11,7 @@ package dsl %type root %type expr -%token NF_FIELD STRING AND OR EQ NEQ GT LT REG NREG OPEN_PARENTHESIS CLOSE_PARENTHESIS WITH WITHOUT +%token NF_FIELD STRING AND OR EQ NEQ GT LT GE LE REG NREG OPEN_PARENTHESIS CLOSE_PARENTHESIS WITH WITHOUT %token NUMBER %left AND %left OR @@ -35,6 +35,8 @@ expr: | NF_FIELD NEQ NUMBER { $$ = NEqNumExpr{key: $1, value: $3} } | NF_FIELD LT NUMBER { $$ = LessThanExpr{key: $1, value: $3} } | NF_FIELD GT NUMBER { $$ = GreaterThanExpr{key: $1, value: $3} } + | NF_FIELD LE NUMBER { $$ = LessOrEqualThanExpr{key: $1, value: $3} } + | NF_FIELD GE NUMBER { $$ = GreaterOrEqualThanExpr{key: $1, value: $3} } | NF_FIELD REG STRING { $$ = RegExpr{key: $1, value: $3} } | NF_FIELD NREG STRING { $$ = NRegExpr{key: $1, value: $3} } %% diff --git a/pkg/dsl/expr.y.go b/pkg/dsl/expr.y.go index a1b49408a..0c560d4cb 100644 --- a/pkg/dsl/expr.y.go +++ b/pkg/dsl/expr.y.go @@ -23,13 +23,15 @@ const EQ = 57350 const NEQ = 57351 const GT = 57352 const LT = 57353 -const REG = 57354 -const NREG = 57355 -const OPEN_PARENTHESIS = 57356 -const CLOSE_PARENTHESIS = 57357 -const WITH = 57358 -const WITHOUT = 57359 -const NUMBER = 57360 +const GE = 57354 +const LE = 57355 +const REG = 57356 +const NREG = 57357 +const OPEN_PARENTHESIS = 57358 +const CLOSE_PARENTHESIS = 57359 +const WITH = 57360 +const WITHOUT = 57361 +const NUMBER = 57362 var yyToknames = [...]string{ "$end", @@ -43,6 +45,8 @@ var yyToknames = [...]string{ "NEQ", "GT", "LT", + "GE", + "LE", "REG", "NREG", "OPEN_PARENTHESIS", @@ -58,7 +62,7 @@ const yyEofCode = 1 const yyErrCode = 2 const yyInitialStackSize = 
16 -//line pkg/dsl/expr.y:40 +//line pkg/dsl/expr.y:42 //line yacctab:1 var yyExca = [...]int8{ @@ -69,48 +73,48 @@ var yyExca = [...]int8{ const yyPrivate = 57344 -const yyLast = 35 +const yyLast = 39 var yyAct = [...]int8{ - 6, 28, 25, 23, 27, 7, 8, 32, 31, 11, - 3, 8, 4, 5, 20, 26, 24, 12, 13, 15, - 14, 16, 17, 2, 10, 7, 8, 9, 30, 29, - 22, 18, 19, 21, 1, + 6, 12, 13, 15, 14, 17, 16, 18, 19, 27, + 25, 32, 3, 34, 4, 5, 11, 31, 30, 29, + 7, 8, 8, 36, 28, 26, 35, 10, 2, 7, + 8, 22, 9, 33, 24, 23, 20, 21, 1, } var yyPact = [...]int16{ - -4, -1000, 19, -4, 10, -5, 9, -4, -4, -1, - 29, 26, -2, -3, -14, -17, 24, 23, 4, -1000, - -1000, -7, -8, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, + -4, -1000, 23, -4, 11, 0, -7, -4, -4, 14, + 31, 30, 5, 4, -1, -2, -3, -9, 28, 8, + 15, -1000, -1000, 9, 6, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, } var yyPgo = [...]int8{ - 0, 34, 23, + 0, 38, 28, } var yyR1 = [...]int8{ 0, 1, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, } var yyR2 = [...]int8{ 0, 1, 3, 3, 3, 4, 4, 3, 3, 3, - 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, } var yyChk = [...]int16{ - -1000, -1, -2, 14, 16, 17, 4, 6, 7, -2, - 14, 14, 8, 9, 11, 10, 12, 13, -2, -2, - 15, 4, 4, 5, 18, 5, 18, 18, 18, 5, - 5, 15, 15, + -1000, -1, -2, 16, 18, 19, 4, 6, 7, -2, + 16, 16, 8, 9, 11, 10, 13, 12, 14, 15, + -2, -2, 17, 4, 4, 5, 20, 5, 20, 20, + 20, 20, 20, 5, 5, 17, 17, } var yyDef = [...]int8{ 0, -2, 1, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 3, 4, - 2, 0, 0, 7, 9, 8, 10, 11, 12, 13, - 14, 5, 6, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 3, 4, 2, 0, 0, 7, 9, 8, 10, 11, + 12, 13, 14, 15, 16, 5, 6, } var yyTok1 = [...]int8{ @@ -119,7 +123,7 @@ var yyTok1 = [...]int8{ var yyTok2 = [...]int8{ 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, - 12, 13, 14, 15, 16, 17, 18, + 12, 13, 14, 15, 16, 17, 18, 19, 20, } var yyTok3 = [...]int8{ @@ -540,11 +544,23 @@ yydefault: yyDollar = yyS[yypt-3 : yypt+1] //line pkg/dsl/expr.y:38 { - yyVAL.expr = RegExpr{key: yyDollar[1].value, value: yyDollar[3].value} + yyVAL.expr = LessOrEqualThanExpr{key: yyDollar[1].value, value: yyDollar[3].intValue} } case 14: yyDollar = yyS[yypt-3 : yypt+1] //line pkg/dsl/expr.y:39 + { + yyVAL.expr = GreaterOrEqualThanExpr{key: yyDollar[1].value, value: yyDollar[3].intValue} + } + case 15: + yyDollar = yyS[yypt-3 : yypt+1] +//line pkg/dsl/expr.y:40 + { + yyVAL.expr = RegExpr{key: yyDollar[1].value, value: yyDollar[3].value} + } + case 16: + yyDollar = yyS[yypt-3 : yypt+1] +//line pkg/dsl/expr.y:41 { yyVAL.expr = NRegExpr{key: yyDollar[1].value, value: yyDollar[3].value} } diff --git a/pkg/dsl/lexer.go b/pkg/dsl/lexer.go index a234e2c12..199513f27 100644 --- a/pkg/dsl/lexer.go +++ b/pkg/dsl/lexer.go @@ -24,6 +24,8 @@ var syntaxTokens = map[string]int{ "!~": NREG, ">": GT, "<": LT, + ">=": GE, + "<=": LE, operatorOr: OR, operatorAnd: AND, "with": WITH, @@ -90,6 +92,18 @@ func (e GreaterThanExpr) toTree() (*tree, error) { return &tree{predicate: filters.GreaterThan(e.key, e.value)}, nil } +type LessOrEqualThanExpr kvPairInt + +func (e LessOrEqualThanExpr) toTree() (*tree, error) { + return &tree{predicate: filters.LessOrEqualThan(e.key, e.value)}, nil +} + +type GreaterOrEqualThanExpr kvPairInt + +func (e GreaterOrEqualThanExpr) toTree() (*tree, error) { + return &tree{predicate: filters.GreaterOrEqualThan(e.key, e.value)}, nil +} + type RegExpr kvPair func (e RegExpr) toTree() (*tree, error) { diff --git a/pkg/utils/filters/filters.go b/pkg/utils/filters/filters.go index 
5643e41f6..21b62dac3 100644 --- a/pkg/utils/filters/filters.go +++ b/pkg/utils/filters/filters.go @@ -83,6 +83,14 @@ func GreaterThan(key string, filterValue int) Predicate { return castIntAndCheck(key, func(i int) bool { return i > filterValue }) } +func LessOrEqualThan(key string, filterValue int) Predicate { + return castIntAndCheck(key, func(i int) bool { return i <= filterValue }) +} + +func GreaterOrEqualThan(key string, filterValue int) Predicate { + return castIntAndCheck(key, func(i int) bool { return i >= filterValue }) +} + func castIntAndCheck(key string, check func(int) bool) Predicate { return func(flow config.GenericMap) bool { if val, found := flow[key]; found {
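
---

Editor's note: taken together, patches 6 and 7 wire numeric comparisons end to end — the lexer emits GT/LT/GE/LE and NUMBER tokens, the grammar maps them to the numeric expression nodes, and those nodes resolve to the new predicates in pkg/utils/filters. Below is a minimal usage sketch of the resulting DSL, assuming Parse is exported from pkg/dsl with the (predicate, error) shape the tests show; the query string, field names and values are illustrative only.

```go
package main

import (
	"fmt"

	"github.com/netobserv/flowlogs-pipeline/pkg/config"
	"github.com/netobserv/flowlogs-pipeline/pkg/dsl"
)

func main() {
	// Combine string equality, a unary presence check, and the numeric
	// comparisons introduced in patches 6 and 7 (illustrative query).
	predicate, err := dsl.Parse(`srcnamespace="netobserv" and with(dstkind) and bytes >= 1000 and packets < 100`)
	if err != nil {
		panic(err)
	}

	// A predicate evaluates a single flow, represented as a config.GenericMap.
	flow := config.GenericMap{
		"srcnamespace": "netobserv",
		"dstkind":      "pod",
		"bytes":        1500,
		"packets":      42,
	}
	fmt.Println(predicate(flow)) // true: all four conditions hold
}
```

Note that the lexer rejects floats at this point ("Float values are currently unsupported"), so numeric literals in queries must be integers.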
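
The new numeric predicates are also usable directly, without going through the DSL, since they are plain filters.Predicate values (func(config.GenericMap) bool). A small sketch composing them by hand, roughly mirroring what the generated LogicalExpr does for `and` — the allOf helper is illustrative and not part of the patches.

```go
package main

import (
	"fmt"

	"github.com/netobserv/flowlogs-pipeline/pkg/config"
	"github.com/netobserv/flowlogs-pipeline/pkg/utils/filters"
)

// allOf ANDs predicates together: every predicate must accept the flow.
// This is an illustrative helper, not an API added by these patches.
func allOf(preds ...filters.Predicate) filters.Predicate {
	return func(flow config.GenericMap) bool {
		for _, p := range preds {
			if !p(flow) {
				return false
			}
		}
		return true
	}
}

func main() {
	pred := allOf(
		filters.NumEquals("flowdirection", 0),
		filters.GreaterOrEqualThan("bytes", 1000),
		filters.LessThan("packets", 100),
	)
	fmt.Println(pred(config.GenericMap{
		"flowdirection": 0,
		"bytes":         int32(2048), // any integer type is accepted via the int conversion
		"packets":       7,
	})) // true
}
```

Since all four comparison predicates go through castIntAndCheck, a missing field or a value that cannot be converted to an integer simply evaluates to false rather than raising an error.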