diff --git a/filter/feature.go b/filter/feature.go
index 325f5f6..0be5376 100644
--- a/filter/feature.go
+++ b/filter/feature.go
@@ -76,7 +76,7 @@ func (f *FeaturesFilter) add(fs Features) {
 }
 
 // Match returns true if a filter matches given features.
-func (f FeaturesFilter) Match(fs Features) bool {
+func (f *FeaturesFilter) Match(fs Features) bool {
 	n := &f.root
 	for _, v := range fs {
 		if k := n.has(v); k >= 0 {
@@ -92,7 +92,7 @@ func (f FeaturesFilter) Match(fs Features) bool {
 }
 
 // String implements string interface.
-func (f FeaturesFilter) String() string {
+func (f *FeaturesFilter) String() string {
 	var buf strings.Builder
 	filterString(&buf, 0, &f.root)
 	return buf.String()
diff --git a/filter/filter_test.go b/filter/filter_test.go
index 560c59b..d696d9d 100644
--- a/filter/filter_test.go
+++ b/filter/filter_test.go
@@ -156,10 +156,10 @@ func Benchmark_TokenFilter_WordFilter(b *testing.B) {
 	b.Run("specific token filter", func(b *testing.B) {
 		tokens := tnz.Tokenize(input)
 		words := []string{"人魚", "南", "の"}
-		filter := filter.NewWordFilter(words)
+		fl := filter.NewWordFilter(words)
 		b.ResetTimer()
 		for i := 0; i < b.N; i++ {
-			filter.Keep(&tokens)
+			fl.Keep(&tokens)
 		}
 	})
 }
diff --git a/filter/pos_test.go b/filter/pos_test.go
index 7c68d78..6b1d89e 100644
--- a/filter/pos_test.go
+++ b/filter/pos_test.go
@@ -76,10 +76,10 @@ func TestPOSFilter_Match(t *testing.T) {
 	tokens := tnz.Tokenize(input)
 	for _, v := range testdata {
 		t.Run(v.title, func(t *testing.T) {
-			filter := filter.NewPOSFilter(v.featuresList...)
+			fl := filter.NewPOSFilter(v.featuresList...)
 			var got []string
 			for _, token := range tokens {
-				if filter.Match(token.POS()) {
+				if fl.Match(token.POS()) {
 					got = append(got, token.Surface)
 				}
 			}
@@ -126,9 +126,9 @@ func TestPOSFilter_Keep(t *testing.T) {
 	for _, v := range testdata {
 		t.Run(v.title, func(t *testing.T) {
 			tokens := tnz.Tokenize(input)
-			filter := filter.NewPOSFilter(v.featuresList...)
+			fl := filter.NewPOSFilter(v.featuresList...)
 			var got []string
-			filter.Keep(&tokens)
+			fl.Keep(&tokens)
 			for _, token := range tokens {
 				got = append(got, token.Surface)
 			}
@@ -139,8 +139,8 @@ func TestPOSFilter_Keep(t *testing.T) {
 	}
 
 	t.Run("empty input test", func(t *testing.T) {
-		filter := filter.NewPOSFilter(nil)
-		filter.Keep(nil)
+		fl := filter.NewPOSFilter(nil)
+		fl.Keep(nil)
 	})
 }
 
@@ -182,9 +182,9 @@ func TestPOSFilter_Drop(t *testing.T) {
	for _, v := range testdata {
 		t.Run(v.title, func(t *testing.T) {
 			tokens := tnz.Tokenize(input)
-			filter := filter.NewPOSFilter(v.featuresList...)
+			fl := filter.NewPOSFilter(v.featuresList...)
 			var got []string
-			filter.Drop(&tokens)
+			fl.Drop(&tokens)
 			for _, token := range tokens {
 				got = append(got, token.Surface)
 			}
@@ -195,8 +195,8 @@ func TestPOSFilter_Drop(t *testing.T) {
 	}
 
 	t.Run("empty input test", func(t *testing.T) {
-		filter := filter.NewPOSFilter(nil)
-		filter.Drop(nil)
+		fl := filter.NewPOSFilter(nil)
+		fl.Drop(nil)
 	})
 }
 
diff --git a/filter/word_test.go b/filter/word_test.go
index 58fc6ed..4b9ccba 100644
--- a/filter/word_test.go
+++ b/filter/word_test.go
@@ -44,10 +44,10 @@ func TestWordFilter_Match(t *testing.T) {
 	tokens := tnz.Tokenize(input)
 	for _, v := range testdata {
 		t.Run(v.title, func(t *testing.T) {
-			filter := filter.NewWordFilter(v.wordList)
+			fl := filter.NewWordFilter(v.wordList)
 			var got []string
 			for _, token := range tokens {
-				if filter.Match(token.Surface) {
+				if fl.Match(token.Surface) {
 					got = append(got, token.Surface)
 				}
 			}
@@ -92,9 +92,9 @@ func TestWordFilter_Keep(t *testing.T) {
 	for _, v := range testdata {
 		t.Run(v.title, func(t *testing.T) {
 			tokens := tnz.Tokenize(input)
-			filter := filter.NewWordFilter(v.wordList)
+			fl := filter.NewWordFilter(v.wordList)
 			var got []string
-			filter.Keep(&tokens)
+			fl.Keep(&tokens)
 			for _, token := range tokens {
 				got = append(got, token.Surface)
 			}
@@ -105,8 +105,8 @@ func TestWordFilter_Keep(t *testing.T) {
 	}
 
 	t.Run("empty input test", func(t *testing.T) {
-		filter := filter.NewWordFilter(nil)
-		filter.Keep(nil)
+		fl := filter.NewWordFilter(nil)
+		fl.Keep(nil)
 	})
 }
 
@@ -146,9 +146,9 @@ func TestWordFilter_Drop(t *testing.T) {
 	for _, v := range testdata {
 		t.Run(v.title, func(t *testing.T) {
 			tokens := tnz.Tokenize(input)
-			filter := filter.NewWordFilter(v.wordList)
+			fl := filter.NewWordFilter(v.wordList)
 			var got []string
-			filter.Drop(&tokens)
+			fl.Drop(&tokens)
 			for _, token := range tokens {
 				got = append(got, token.Surface)
 			}
@@ -159,8 +159,8 @@ func TestWordFilter_Drop(t *testing.T) {
 	}
 
 	t.Run("empty input test", func(t *testing.T) {
-		filter := filter.NewWordFilter(nil)
-		filter.Drop(nil)
+		fl := filter.NewWordFilter(nil)
+		fl.Drop(nil)
 	})
 }