[chore]: enable gofumpt linter for processor (#36347)
#### Description

[gofumpt](https://golangci-lint.run/usage/linters/#gofumpt) enforces a stricter format than gofmt.
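The rewrites visible in this diff fall into a handful of patterns: adjacent single-spec `const`, `var`, and `type` declarations are merged into grouped blocks (and one-element groups are unwrapped), `var x = ...` inside a function body becomes `x := ...`, leading blank lines inside struct declarations are dropped, and multi-line parameter lists get a trailing comma with the closing parenthesis on its own line. A minimal, hypothetical sketch of the before/after (the identifiers `example`, `newThing`, and `compute` are made up for illustration, not taken from this PR):

```go
// Hypothetical package illustrating formatting that gofmt accepts but gofumpt rewrites.
package example

// Adjacent single-line const declarations are grouped into one block.
// (gofmt would leave separate `const a = 1` / `const b = 2` lines untouched.)
const (
	a = 1
	b = 2
)

// When parameters are split across lines, the last parameter gets a trailing
// comma and the closing parenthesis moves to its own line before the brace.
func newThing(
	name string,
	size int,
) (string, int) {
	// Inside a function body, `x := ...` is preferred over `var x = ...`.
	x := compute(size)
	return name, x
}

func compute(n int) int { return n + a + b }
```

Running gofumpt with its `-w` flag rewrites files in place, mirroring gofmt's behavior; with the linter enabled in golangci-lint, the same formatting is enforced in CI.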

Signed-off-by: Matthieu MOREL <[email protected]>
mmorel-35 authored Nov 13, 2024
1 parent 7c914b9 commit 6db5d1a
Showing 64 changed files with 2,842 additions and 2,733 deletions.
2 changes: 1 addition & 1 deletion processor/attributesprocessor/attributes_log_test.go
@@ -136,7 +136,7 @@ func TestAttributes_FilterLogs(t *testing.T) {
}
oCfg.Include = &filterconfig.MatchProperties{
Resources: []filterconfig.Attribute{{Key: "name", Value: "^[^i].*"}},
//Libraries: []filterconfig.InstrumentationLibrary{{Name: "^[^i].*"}},
// Libraries: []filterconfig.InstrumentationLibrary{{Name: "^[^i].*"}},
Config: *createConfig(filterset.Regexp),
}
oCfg.Exclude = &filterconfig.MatchProperties{
1 change: 1 addition & 0 deletions processor/attributesprocessor/attributes_metric_test.go
@@ -347,6 +347,7 @@ func TestMetricAttributes_Hash(t *testing.T) {
runIndividualMetricTestCase(t, tc, mp)
}
}

func TestMetricAttributes_Convert(t *testing.T) {
testCases := []metricTestCase{
{
@@ -25,9 +25,11 @@ type MetricIdentity struct {
MetricValueType pmetric.NumberDataPointValueType
}

const A = int32('A')
const SEP = byte(0x1E)
const SEPSTR = string(SEP)
const (
A = int32('A')
SEP = byte(0x1E)
SEPSTR = string(SEP)
)

func (mi *MetricIdentity) Write(b *bytes.Buffer) {
b.WriteRune(A + int32(mi.MetricType))
@@ -20,7 +20,7 @@ const ø = math.MaxUint64
func TestExpoAdd(t *testing.T) {
type expdp = expotest.Histogram
type bins = expotest.Bins
var obs0 = expotest.Observe0
obs0 := expotest.Observe0

cases := []struct {
name string
@@ -13,7 +13,7 @@ import (

func TestHistoAdd(t *testing.T) {
type histdp = histotest.Histogram
var obs = histotest.Bounds(histo.DefaultBounds).Observe
obs := histotest.Bounds(histo.DefaultBounds).Observe

cases := []struct {
name string
@@ -16,8 +16,10 @@ import (
"github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor/internal/testdata/random"
)

var rdp data.Number
var rid streams.Ident
var (
rdp data.Number
rid streams.Ident
)

func BenchmarkSamples(b *testing.B) {
b.Run("iterfn", func(b *testing.B) {
1 change: 0 additions & 1 deletion processor/deltatocumulativeprocessor/linear.go
@@ -115,7 +115,6 @@ func (p *Linear) ConsumeMetrics(ctx context.Context, md pmetric.Metrics) error {
// tracked stream: add incoming delta dp to existing cumulative aggregation
return acc, delta.AccumulateInto(acc, dp)
}()

// aggregation failed, record as metric and drop datapoint
if err != nil {
p.tel.Datapoints().Inc(ctx, telemetry.Cause(err))
1 change: 0 additions & 1 deletion processor/deltatorateprocessor/config.go
@@ -9,7 +9,6 @@ import (

// Config defines the configuration for the processor.
type Config struct {

// List of delta sum metrics to convert to rates
Metrics []string `mapstructure:"metrics"`
}
154 changes: 76 additions & 78 deletions processor/deltatorateprocessor/processor_test.go
@@ -31,84 +31,82 @@ type deltaToRateTest struct {
outMetrics pmetric.Metrics
}

var (
testCases = []deltaToRateTest{
{
name: "delta_to_rate_expect_same",
metrics: nil,
inMetrics: generateSumMetrics(testMetric{
metricNames: []string{"metric_1", "metric_2"},
metricValues: [][]float64{{100}, {4}},
isDelta: []bool{true, true},
deltaSecond: 120,
}),
outMetrics: generateSumMetrics(testMetric{
metricNames: []string{"metric_1", "metric_2"},
metricValues: [][]float64{{100}, {4}},
isDelta: []bool{true, true},
deltaSecond: 120,
}),
},
{
name: "delta_to_rate_one_positive",
metrics: []string{"metric_1", "metric_2"},
inMetrics: generateSumMetrics(testMetric{
metricNames: []string{"metric_1", "metric_2"},
metricValues: [][]float64{{120, 240, 360}, {360}},
isDelta: []bool{true, true},
deltaSecond: 120,
}),
outMetrics: generateGaugeMetrics(testMetric{
metricNames: []string{"metric_1", "metric_2"},
metricValues: [][]float64{{1, 2, 3}, {3}},
}),
},
{
name: "delta_to_rate_with_cumulative",
metrics: []string{"metric_1", "metric_2"},
inMetrics: generateSumMetrics(testMetric{
metricNames: []string{"metric_1", "metric_2"},
metricValues: [][]float64{{100}, {4}},
isDelta: []bool{false, false},
deltaSecond: 120,
}),
outMetrics: generateSumMetrics(testMetric{
metricNames: []string{"metric_1", "metric_2"},
metricValues: [][]float64{{100}, {4}},
isDelta: []bool{false, false},
deltaSecond: 120,
}),
},
{
name: "delta_to_rate_expect_zero",
metrics: []string{"metric_1", "metric_2"},
inMetrics: generateSumMetrics(testMetric{
metricNames: []string{"metric_1", "metric_2"},
metricValues: [][]float64{{120, 240, 360}, {360}},
isDelta: []bool{true, true},
deltaSecond: 0,
}),
outMetrics: generateGaugeMetrics(testMetric{
metricNames: []string{"metric_1", "metric_2"},
metricValues: [][]float64{{0, 0, 0}, {0}},
}),
},
{
name: "int64-delta_to_rate_one_positive",
metrics: []string{"metric_1", "metric_2"},
inMetrics: generateSumMetrics(testMetric{
metricNames: []string{"metric_1", "metric_2"},
metricIntValues: [][]int64{{120, 240, 360}, {360}},
isDelta: []bool{true, true},
deltaSecond: 120,
}),
outMetrics: generateGaugeMetrics(testMetric{
metricNames: []string{"metric_1", "metric_2"},
metricValues: [][]float64{{1, 2, 3}, {3}},
}),
},
}
)
var testCases = []deltaToRateTest{
{
name: "delta_to_rate_expect_same",
metrics: nil,
inMetrics: generateSumMetrics(testMetric{
metricNames: []string{"metric_1", "metric_2"},
metricValues: [][]float64{{100}, {4}},
isDelta: []bool{true, true},
deltaSecond: 120,
}),
outMetrics: generateSumMetrics(testMetric{
metricNames: []string{"metric_1", "metric_2"},
metricValues: [][]float64{{100}, {4}},
isDelta: []bool{true, true},
deltaSecond: 120,
}),
},
{
name: "delta_to_rate_one_positive",
metrics: []string{"metric_1", "metric_2"},
inMetrics: generateSumMetrics(testMetric{
metricNames: []string{"metric_1", "metric_2"},
metricValues: [][]float64{{120, 240, 360}, {360}},
isDelta: []bool{true, true},
deltaSecond: 120,
}),
outMetrics: generateGaugeMetrics(testMetric{
metricNames: []string{"metric_1", "metric_2"},
metricValues: [][]float64{{1, 2, 3}, {3}},
}),
},
{
name: "delta_to_rate_with_cumulative",
metrics: []string{"metric_1", "metric_2"},
inMetrics: generateSumMetrics(testMetric{
metricNames: []string{"metric_1", "metric_2"},
metricValues: [][]float64{{100}, {4}},
isDelta: []bool{false, false},
deltaSecond: 120,
}),
outMetrics: generateSumMetrics(testMetric{
metricNames: []string{"metric_1", "metric_2"},
metricValues: [][]float64{{100}, {4}},
isDelta: []bool{false, false},
deltaSecond: 120,
}),
},
{
name: "delta_to_rate_expect_zero",
metrics: []string{"metric_1", "metric_2"},
inMetrics: generateSumMetrics(testMetric{
metricNames: []string{"metric_1", "metric_2"},
metricValues: [][]float64{{120, 240, 360}, {360}},
isDelta: []bool{true, true},
deltaSecond: 0,
}),
outMetrics: generateGaugeMetrics(testMetric{
metricNames: []string{"metric_1", "metric_2"},
metricValues: [][]float64{{0, 0, 0}, {0}},
}),
},
{
name: "int64-delta_to_rate_one_positive",
metrics: []string{"metric_1", "metric_2"},
inMetrics: generateSumMetrics(testMetric{
metricNames: []string{"metric_1", "metric_2"},
metricIntValues: [][]int64{{120, 240, 360}, {360}},
isDelta: []bool{true, true},
deltaSecond: 120,
}),
outMetrics: generateGaugeMetrics(testMetric{
metricNames: []string{"metric_1", "metric_2"},
metricValues: [][]float64{{1, 2, 3}, {3}},
}),
},
}

func TestCumulativeToDeltaProcessor(t *testing.T) {
for _, test := range testCases {
6 changes: 4 additions & 2 deletions processor/filterprocessor/expr_test.go
@@ -21,8 +21,10 @@ import (
"github.com/open-telemetry/opentelemetry-collector-contrib/internal/filter/filterconfig"
)

const filteredMetric = "p0_metric_1"
const filteredAttrKey = "pt-label-key-1"
const (
filteredMetric = "p0_metric_1"
filteredAttrKey = "pt-label-key-1"
)

var filteredAttrVal = pcommon.NewValueStr("pt-label-val-1")

1 change: 0 additions & 1 deletion processor/groupbyattrsprocessor/config.go
@@ -5,7 +5,6 @@ package groupbyattrsprocessor // import "github.com/open-telemetry/opentelemetry

// Config is the configuration for the processor.
type Config struct {

// GroupByKeys describes the attribute names that are going to be used for grouping.
// Empty value is allowed, since processor in such case can compact data
GroupByKeys []string `mapstructure:"keys"`
13 changes: 7 additions & 6 deletions processor/groupbyattrsprocessor/factory.go
@@ -15,9 +15,7 @@ import (
"github.com/open-telemetry/opentelemetry-collector-contrib/processor/groupbyattrsprocessor/internal/metadata"
)

var (
consumerCapabilities = consumer.Capabilities{MutatesData: true}
)
var consumerCapabilities = consumer.Capabilities{MutatesData: true}

// NewFactory returns a new factory for the Filter processor.
func NewFactory() processor.Factory {
@@ -64,7 +62,8 @@ func createTracesProcessor(
ctx context.Context,
set processor.Settings,
cfg component.Config,
nextConsumer consumer.Traces) (processor.Traces, error) {
nextConsumer consumer.Traces,
) (processor.Traces, error) {
oCfg := cfg.(*Config)
gap, err := createGroupByAttrsProcessor(set, oCfg.GroupByKeys)
if err != nil {
@@ -85,7 +84,8 @@ func createLogsProcessor(
ctx context.Context,
set processor.Settings,
cfg component.Config,
nextConsumer consumer.Logs) (processor.Logs, error) {
nextConsumer consumer.Logs,
) (processor.Logs, error) {
oCfg := cfg.(*Config)
gap, err := createGroupByAttrsProcessor(set, oCfg.GroupByKeys)
if err != nil {
@@ -106,7 +106,8 @@ func createMetricsProcessor(
ctx context.Context,
set processor.Settings,
cfg component.Config,
nextConsumer consumer.Metrics) (processor.Metrics, error) {
nextConsumer consumer.Metrics,
) (processor.Metrics, error) {
oCfg := cfg.(*Config)
gap, err := createGroupByAttrsProcessor(set, oCfg.GroupByKeys)
if err != nil {
4 changes: 1 addition & 3 deletions processor/groupbyattrsprocessor/processor_test.go
@@ -22,9 +22,7 @@ import (
"go.opentelemetry.io/otel/sdk/metric/metricdata"
)

var (
attrMap = prepareAttributeMap()
)
var attrMap = prepareAttributeMap()

func prepareAttributeMap() pcommon.Map {
am := pcommon.NewMap()
1 change: 0 additions & 1 deletion processor/groupbytraceprocessor/config.go
@@ -9,7 +9,6 @@ import (

// Config is the configuration for the processor.
type Config struct {

// NumTraces is the max number of traces to keep in memory waiting for the duration.
// Default: 1_000_000.
NumTraces int `mapstructure:"num_traces"`
12 changes: 7 additions & 5 deletions processor/groupbytraceprocessor/event.go
@@ -48,11 +48,13 @@ var (
}
)

type eventType int
type event struct {
typ eventType
payload any
}
type (
eventType int
event struct {
typ eventType
payload any
}
)

type tracesWithID struct {
id pcommon.TraceID
3 changes: 2 additions & 1 deletion processor/groupbytraceprocessor/factory.go
@@ -54,7 +54,8 @@ func createTracesProcessor(
_ context.Context,
params processor.Settings,
cfg component.Config,
nextConsumer consumer.Traces) (processor.Traces, error) {
nextConsumer consumer.Traces,
) (processor.Traces, error) {
oCfg := cfg.(*Config)

var st storage