Skip to content

Commit

Permalink
Remove toki
Browse files Browse the repository at this point in the history
Remove the toki module, replacing it with a rewritten module, tokre.
This is a tokenizer based on regular expressions.
The API is similar, but not the same.
The tokre module is pulled into this repository rather than published
separately, since the original code has few users.
Fixes issue #551
  • Loading branch information
mbells committed Sep 24, 2024
1 parent b30db90 commit 651bf8a
Show file tree
Hide file tree
Showing 7 changed files with 216 additions and 50 deletions.
1 change: 0 additions & 1 deletion go.mod
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,6 @@ require (
github.com/sirupsen/logrus v1.9.3
github.com/streadway/amqp v0.0.0-20170521212453-dfe15e360485
github.com/stretchr/testify v1.8.4
github.com/taylorchu/toki v0.0.0-20141019163204-20e86122596c
github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c
)

Expand Down
2 changes: 0 additions & 2 deletions go.sum
Original file line number Diff line number Diff line change
Expand Up @@ -107,8 +107,6 @@ github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UV
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/taylorchu/toki v0.0.0-20141019163204-20e86122596c h1:FPVNYOiTjaGNfDYevBEiLLu9iSkBdvnfMcnR6qKkwbg=
github.com/taylorchu/toki v0.0.0-20141019163204-20e86122596c/go.mod h1:YYyjUTaSaC2W8KSnbqKGHDKPd/e/8c88su0LP0NkUfk=
github.com/tinylib/msgp v1.1.0 h1:9fQd+ICuRIu/ue4vxJZu6/LzxN0HwMds2nq/0cFvxHU=
github.com/tinylib/msgp v1.1.0/go.mod h1:+d+yLhGm8mzTaHzB+wgMYrodPfmZrzkirds8fDWklFE=
github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c h1:u40Z8hqBAAQyv+vATcGgV0YCnDjqSL7/q/JyPhhJSPk=
Expand Down
70 changes: 35 additions & 35 deletions imperatives/imperatives.go
Original file line number Diff line number Diff line change
Expand Up @@ -14,12 +14,12 @@ import (
"github.com/grafana/carbon-relay-ng/rewriter"
"github.com/grafana/carbon-relay-ng/route"
"github.com/grafana/carbon-relay-ng/table"
"github.com/grafana/carbon-relay-ng/tokre"
"github.com/grafana/metrictank/cluster/partitioner"
"github.com/taylorchu/toki"
)

const (
addBlack toki.Token = iota
addBlack tokre.Token = iota
addBlock
addAgg
addRouteSendAllMatch
Expand Down Expand Up @@ -99,7 +99,7 @@ const (
// we should make sure we apply changes atomatically. e.g. when changing dest between address A and pickle=false and B with pickle=true,
// we should never half apply the change if one of them fails.

var tokens = []toki.Def{
var tokens = []tokre.Def{
{Token: addBlack, Pattern: "addBlack"},
{Token: addBlock, Pattern: "addBlock"},
{Token: addAgg, Pattern: "addAgg"},
Expand Down Expand Up @@ -192,7 +192,7 @@ var errFmtModRoute = errors.New("modRoute <routeKey> <prefix/sub/regex=>")
var errOrgId0 = errors.New("orgId must be a number > 0")

func Apply(table table.Interface, cmd string) error {
s := toki.NewScanner(tokens)
s := tokre.NewScanner(tokens)
s.SetInput(strings.Replace(cmd, " ", " ## ", -1)) // token library skips whitespace but for us double space is significant
t := s.Next()
switch t.Token {
Expand Down Expand Up @@ -229,7 +229,7 @@ func Apply(table table.Interface, cmd string) error {
}
}

func readAddAgg(s *toki.Scanner, table table.Interface) error {
func readAddAgg(s *tokre.Scanner, table table.Interface) error {
t := s.Next()
if t.Token != sumFn && t.Token != avgFn && t.Token != minFn && t.Token != maxFn && t.Token != lastFn && t.Token != deltaFn && t.Token != countFn && t.Token != deriveFn && t.Token != stdevFn {
return errors.New("invalid function. need avg/max/min/sum/last/count/delta/derive/stdev")
Expand All @@ -250,7 +250,7 @@ func readAddAgg(s *toki.Scanner, table table.Interface) error {
t = s.Next()
}
// scan for prefix/sub/regex=, stop when we hit a bare word (outFmt)
for ; t.Token != toki.EOF && t.Token != word; t = s.Next() {
for ; t.Token != tokre.EOF && t.Token != word; t = s.Next() {
switch t.Token {
case optPrefix:
if t = s.Next(); t.Token != word {
Expand Down Expand Up @@ -316,7 +316,7 @@ func readAddAgg(s *toki.Scanner, table table.Interface) error {
dropRaw := false

t = s.Next()
for ; t.Token != toki.EOF; t = s.Next() {
for ; t.Token != tokre.EOF; t = s.Next() {
switch t.Token {
case optCache:
t = s.Next()
Expand Down Expand Up @@ -356,7 +356,7 @@ func readAddAgg(s *toki.Scanner, table table.Interface) error {
return nil
}

func readAddBlock(s *toki.Scanner, table table.Interface) error {
func readAddBlock(s *tokre.Scanner, table table.Interface) error {
prefix := ""
notPrefix := ""
sub := ""
Expand Down Expand Up @@ -411,7 +411,7 @@ func readAddBlock(s *toki.Scanner, table table.Interface) error {
return nil
}

func readAddRoute(s *toki.Scanner, table table.Interface, constructor func(key string, matcher matcher.Matcher, destinations []*destination.Destination) (route.Route, error)) error {
func readAddRoute(s *tokre.Scanner, table table.Interface, constructor func(key string, matcher matcher.Matcher, destinations []*destination.Destination) (route.Route, error)) error {
t := s.Next()
if t.Token != word {
return errFmtAddRoute
Expand Down Expand Up @@ -444,7 +444,7 @@ func readAddRoute(s *toki.Scanner, table table.Interface, constructor func(key s
return nil
}

func readAddRouteConsistentHashing(s *toki.Scanner, table table.Interface, withFix bool) error {
func readAddRouteConsistentHashing(s *tokre.Scanner, table table.Interface, withFix bool) error {
t := s.Next()
if t.Token != word {
return errFmtAddRoute
Expand Down Expand Up @@ -476,7 +476,7 @@ func readAddRouteConsistentHashing(s *toki.Scanner, table table.Interface, withF
table.AddRoute(route)
return nil
}
func readAddRouteGrafanaNet(s *toki.Scanner, table table.Interface) error {
func readAddRouteGrafanaNet(s *tokre.Scanner, table table.Interface) error {
t := s.Next()
if t.Token != word {
return errFmtAddRouteGrafanaNet
Expand Down Expand Up @@ -519,7 +519,7 @@ func readAddRouteGrafanaNet(s *toki.Scanner, table table.Interface) error {

t = s.Next()

for ; t.Token != toki.EOF; t = s.Next() {
for ; t.Token != tokre.EOF; t = s.Next() {
switch t.Token {
case optAggregationFile:
t = s.Next()
Expand Down Expand Up @@ -663,7 +663,7 @@ func readAddRouteGrafanaNet(s *toki.Scanner, table table.Interface) error {
return nil
}

func readAddRouteKafkaMdm(s *toki.Scanner, table table.Interface) error {
func readAddRouteKafkaMdm(s *tokre.Scanner, table table.Interface) error {
t := s.Next()
if t.Token != word {
return errFmtAddRouteKafkaMdm
Expand Down Expand Up @@ -739,7 +739,7 @@ func readAddRouteKafkaMdm(s *toki.Scanner, table table.Interface) error {
var saslMechanism, saslUsername, saslPassword string

t = s.Next()
for ; t.Token != toki.EOF; t = s.Next() {
for ; t.Token != tokre.EOF; t = s.Next() {
switch t.Token {
case optBlocking:
t = s.Next()
Expand Down Expand Up @@ -864,7 +864,7 @@ func readAddRouteKafkaMdm(s *toki.Scanner, table table.Interface) error {
return nil
}

func readAddRoutePubSub(s *toki.Scanner, table table.Interface) error {
func readAddRoutePubSub(s *tokre.Scanner, table table.Interface) error {
t := s.Next()
if t.Token != word {
return errFmtAddRoutePubSub
Expand Down Expand Up @@ -900,7 +900,7 @@ func readAddRoutePubSub(s *toki.Scanner, table table.Interface) error {
var blocking = false

t = s.Next()
for ; t.Token != toki.EOF; t = s.Next() {
for ; t.Token != tokre.EOF; t = s.Next() {
switch t.Token {
case optPubSubCodec:
t = s.Next()
Expand Down Expand Up @@ -973,8 +973,8 @@ func readAddRoutePubSub(s *toki.Scanner, table table.Interface) error {
return nil
}

func readAddRewriter(s *toki.Scanner, table table.Interface) error {
var t *toki.Result
func readAddRewriter(s *tokre.Scanner, table table.Interface) error {
var t tokre.Result
if t = s.Next(); t.Token != word {
return errFmtAddRewriter
}
Expand Down Expand Up @@ -1002,7 +1002,7 @@ func readAddRewriter(s *toki.Scanner, table table.Interface) error {
return nil
}

func readDelRoute(s *toki.Scanner, table table.Interface) error {
func readDelRoute(s *tokre.Scanner, table table.Interface) error {
t := s.Next()
if t.Token != word {
return errors.New("need route key")
Expand All @@ -1011,7 +1011,7 @@ func readDelRoute(s *toki.Scanner, table table.Interface) error {
return table.DelRoute(key)
}

func readModDest(s *toki.Scanner, table table.Interface) error {
func readModDest(s *tokre.Scanner, table table.Interface) error {
t := s.Next()
if t.Token != word {
return errFmtAddRoute
Expand All @@ -1028,10 +1028,10 @@ func readModDest(s *toki.Scanner, table table.Interface) error {
}

opts := make(map[string]string)
for t.Token != toki.EOF {
for t.Token != tokre.EOF {
t = s.Next()
switch t.Token {
case toki.EOF:
case tokre.EOF:
break
case optAddr:
if t = s.Next(); t.Token != word {
Expand Down Expand Up @@ -1079,18 +1079,18 @@ func readModDest(s *toki.Scanner, table table.Interface) error {
return table.UpdateDestination(key, index, opts)
}

func readModRoute(s *toki.Scanner, table table.Interface) error {
func readModRoute(s *tokre.Scanner, table table.Interface) error {
t := s.Next()
if t.Token != word {
return errFmtAddRoute
}
key := string(t.Value)

opts := make(map[string]string)
for t.Token != toki.EOF {
for t.Token != tokre.EOF {
t = s.Next()
switch t.Token {
case toki.EOF:
case tokre.EOF:
break
case optPrefix:
if t = s.Next(); t.Token != word {
Expand Down Expand Up @@ -1136,14 +1136,14 @@ func readModRoute(s *toki.Scanner, table table.Interface) error {
// we should read and apply all destinations at once,
// or at least make sure we apply them to the global datastruct at once,
// otherwise we can destabilize things / send wrong traffic, etc
func readDestinations(s *toki.Scanner, table table.Interface, allowMatcher bool, routeKey string) (destinations []*destination.Destination, err error) {
func readDestinations(s *tokre.Scanner, table table.Interface, allowMatcher bool, routeKey string) (destinations []*destination.Destination, err error) {
t := s.Peek()
for t.Token != toki.EOF {
for t.Token != tokre.EOF {
for t.Token == sep {
s.Next()
t = s.Peek()
}
if t.Token == toki.EOF {
if t.Token == tokre.EOF {
break
}

Expand All @@ -1158,7 +1158,7 @@ func readDestinations(s *toki.Scanner, table table.Interface, allowMatcher bool,
return destinations, nil
}

func readDestination(s *toki.Scanner, table table.Interface, allowMatcher bool, routeKey string) (dest *destination.Destination, err error) {
func readDestination(s *tokre.Scanner, table table.Interface, allowMatcher bool, routeKey string) (dest *destination.Destination, err error) {
var prefix, notPrefix, sub, notSub, regex, notRegex, addr, spoolDir string
var spool, pickle bool
flush := 1000
Expand All @@ -1180,7 +1180,7 @@ func readDestination(s *toki.Scanner, table table.Interface, allowMatcher bool,
}
addr = string(t.Value)

for t.Token != toki.EOF && t.Token != sep {
for t.Token != tokre.EOF && t.Token != sep {
t = s.Next()
switch t.Token {
case optPrefix:
Expand Down Expand Up @@ -1314,7 +1314,7 @@ func readDestination(s *toki.Scanner, table table.Interface, allowMatcher bool,
return nil, err
}
unspoolSleep = time.Duration(tmp) * time.Microsecond
case toki.EOF:
case tokre.EOF:
case sep:
break
default:
Expand All @@ -1336,7 +1336,7 @@ func readDestination(s *toki.Scanner, table table.Interface, allowMatcher bool,
}

func ParseDestinations(destinationConfigs []string, table table.Interface, allowMatcher bool, routeKey string) (destinations []*destination.Destination, err error) {
s := toki.NewScanner(tokens)
s := tokre.NewScanner(tokens)
for _, destinationConfig := range destinationConfigs {
s.SetInput(destinationConfig)

Expand All @@ -1349,13 +1349,13 @@ func ParseDestinations(destinationConfigs []string, table table.Interface, allow
return destinations, nil
}

func readRouteOpts(s *toki.Scanner) (prefix, notPrefix, sub, notSub, regex, notRegex string, err error) {
func readRouteOpts(s *tokre.Scanner) (prefix, notPrefix, sub, notSub, regex, notRegex string, err error) {
for {
t := s.Next()
switch t.Token {
case toki.EOF:
case tokre.EOF:
return
case toki.Error:
case tokre.Error:
return "", "", "", "", "", "", errors.New("read the error token instead of one i recognize")
case optPrefix:
if t = s.Next(); t.Token != word {
Expand Down
24 changes: 12 additions & 12 deletions imperatives/imperatives_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,53 +10,53 @@ import (
"github.com/grafana/carbon-relay-ng/pkg/test"
"github.com/grafana/carbon-relay-ng/route"
"github.com/grafana/carbon-relay-ng/table"
"github.com/taylorchu/toki"
"github.com/grafana/carbon-relay-ng/tokre"
)

func TestScanner(t *testing.T) {
cases := []struct {
cmd string
exp []toki.Token
exp []tokre.Token
}{
{
"addBlock prefix collectd.localhost",
[]toki.Token{addBlock, word, word},
[]tokre.Token{addBlock, word, word},
},
{
`addBlock regex ^foo\..*\.cpu+`,
[]toki.Token{addBlock, word, word},
[]tokre.Token{addBlock, word, word},
},
{
`addAgg sum ^stats\.timers\.(app|proxy|static)[0-9]+\.requests\.(.*) stats.timers._sum_$1.requests.$2 10 20`,
[]toki.Token{addAgg, sumFn, word, word, num, num},
[]tokre.Token{addAgg, sumFn, word, word, num, num},
},
{
`addAgg avg ^stats\.timers\.(app|proxy|static)[0-9]+\.requests\.(.*) stats.timers._avg_$1.requests.$2 5 10`,
[]toki.Token{addAgg, avgFn, word, word, num, num},
[]tokre.Token{addAgg, avgFn, word, word, num, num},
},
{
"addRoute sendAllMatch carbon-default 127.0.0.1:2005 spool=true pickle=false",
[]toki.Token{addRouteSendAllMatch, word, sep, word, optSpool, optTrue, optPickle, optFalse},
[]tokre.Token{addRouteSendAllMatch, word, sep, word, optSpool, optTrue, optPickle, optFalse},
},
{
"addRoute sendAllMatch carbon-tagger sub== 127.0.0.1:2006",
[]toki.Token{addRouteSendAllMatch, word, optSub, word, sep, word},
[]tokre.Token{addRouteSendAllMatch, word, optSub, word, sep, word},
},
{
"addRoute sendFirstMatch analytics regex=(Err/s|wait_time|logger) graphite.prod:2003 prefix=prod. spool=true pickle=true graphite.staging:2003 prefix=staging. spool=true pickle=true",
[]toki.Token{addRouteSendFirstMatch, word, optRegex, word, sep, word, optPrefix, word, optSpool, optTrue, optPickle, optTrue, sep, word, optPrefix, word, optSpool, optTrue, optPickle, optTrue},
[]tokre.Token{addRouteSendFirstMatch, word, optRegex, word, sep, word, optPrefix, word, optSpool, optTrue, optPickle, optTrue, sep, word, optPrefix, word, optSpool, optTrue, optPickle, optTrue},
},
{
"addRoute sendFirstMatch myRoute1 127.0.0.1:2003 notPrefix=aaa notSub=bbb notRegex=ccc",
[]toki.Token{addRouteSendFirstMatch, word, sep, word, optNotPrefix, word, optNotSub, word, optNotRegex, word},
[]tokre.Token{addRouteSendFirstMatch, word, sep, word, optNotPrefix, word, optNotSub, word, optNotRegex, word},
},
//{ disabled cause tries to read the schemas.conf file
// "addRoute grafanaNet grafanaNet http://localhost:8081/metrics your-grafana.net-api-key /path/to/storage-schemas.conf",
// []toki.Token{addRouteGrafanaNet, word, sep, word, word},
// []tokre.Token{addRouteGrafanaNet, word, sep, word, word},
//},
}
for i, c := range cases {
s := toki.NewScanner(tokens)
s := tokre.NewScanner(tokens)
s.SetInput(strings.Replace(c.cmd, " ", " ## ", -1))
for j, e := range c.exp {
r := s.Next()
Expand Down
6 changes: 6 additions & 0 deletions tokre/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
# Tokre

A tokenizer based on regular expressions, built to be as simple as possible.
The caller provides a list of possible tokens along with the regular
expression that defines each one.
Loading

0 comments on commit 651bf8a

Please sign in to comment.