diff --git a/receiver/hostmetricsreceiver/config.go b/receiver/hostmetricsreceiver/config.go
index b6caa11f4a7c..a26d389be1fd 100644
--- a/receiver/hostmetricsreceiver/config.go
+++ b/receiver/hostmetricsreceiver/config.go
@@ -16,17 +16,14 @@ import (
 	"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver/internal"
 )
 
-const (
-	scrapersKey = "scrapers"
-)
-
 // Config defines configuration for HostMetrics receiver.
 type Config struct {
 	scraperhelper.ControllerConfig `mapstructure:",squash"`
-	Scrapers                       map[string]internal.Config `mapstructure:"-"`
 	// RootPath is the host's root directory (linux only).
 	RootPath string `mapstructure:"root_path"`
 
+	Scrapers map[component.Type]internal.Config `mapstructure:"-"`
+
 	// Collection interval for metadata.
 	// Metadata of the particular entity is collected when the entity changes.
 	// In addition metadata of all entities is collected periodically even if no changes happen.
@@ -36,18 +33,17 @@ type Config struct {
 }
 
 var (
-	_ component.Config    = (*Config)(nil)
-	_ confmap.Unmarshaler = (*Config)(nil)
+	_ component.ConfigValidator = (*Config)(nil)
+	_ confmap.Unmarshaler       = (*Config)(nil)
 )
 
 // Validate checks the receiver configuration is valid
 func (cfg *Config) Validate() error {
 	var err error
 	if len(cfg.Scrapers) == 0 {
-		err = multierr.Append(err, errors.New("must specify at least one scraper when using hostmetrics receiver"))
+		err = errors.New("must specify at least one scraper when using hostmetrics receiver")
 	}
-	err = multierr.Append(err, validateRootPath(cfg.RootPath))
-	return err
+	return multierr.Append(err, validateRootPath(cfg.RootPath))
 }
 
 // Unmarshal a config.Parser into the config struct.
@@ -57,41 +53,43 @@ func (cfg *Config) Unmarshal(componentParser *confmap.Conf) error {
 	}
 
 	// load the non-dynamic config normally
-	err := componentParser.Unmarshal(cfg, confmap.WithIgnoreUnused())
-	if err != nil {
+	if err := componentParser.Unmarshal(cfg, confmap.WithIgnoreUnused()); err != nil {
 		return err
 	}
 
 	// dynamically load the individual collector configs based on the key name
-	cfg.Scrapers = map[string]internal.Config{}
+	cfg.Scrapers = map[component.Type]internal.Config{}
 
-	scrapersSection, err := componentParser.Sub(scrapersKey)
+	scrapersSection, err := componentParser.Sub("scrapers")
 	if err != nil {
 		return err
 	}
 
-	for key := range scrapersSection.ToStringMap() {
-		factory, ok := getScraperFactory(key)
+	for keyStr := range scrapersSection.ToStringMap() {
+		key, err := component.NewType(keyStr)
+		if err != nil {
+			return fmt.Errorf("invalid scraper key name: %s", keyStr)
+		}
+		factory, ok := scraperFactories[key]
 		if !ok {
 			return fmt.Errorf("invalid scraper key: %s", key)
 		}
 
-		collectorCfg := factory.CreateDefaultConfig()
-		collectorViperSection, err := scrapersSection.Sub(key)
+		scraperSection, err := scrapersSection.Sub(keyStr)
 		if err != nil {
 			return err
 		}
-		err = collectorViperSection.Unmarshal(collectorCfg)
-		if err != nil {
+		scraperCfg := factory.CreateDefaultConfig()
+		if err = scraperSection.Unmarshal(scraperCfg); err != nil {
 			return fmt.Errorf("error reading settings for scraper type %q: %w", key, err)
 		}
-		collectorCfg.SetRootPath(cfg.RootPath)
+		scraperCfg.SetRootPath(cfg.RootPath)
 		envMap := setGoPsutilEnvVars(cfg.RootPath, &osEnv{})
-		collectorCfg.SetEnvMap(envMap)
+		scraperCfg.SetEnvMap(envMap)
 
-		cfg.Scrapers[key] = collectorCfg
+		cfg.Scrapers[key] = scraperCfg
 	}
 
 	return nil
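The `Unmarshal` change above keys `cfg.Scrapers` by `component.Type` and validates every key found under the receiver's `scrapers:` section with `component.NewType` before looking up a factory. The following stand-alone sketch (not part of the diff) illustrates that validation using only the public `go.opentelemetry.io/collector/component` API; the sample key strings are made up for illustration.

```go
package main

import (
	"fmt"

	"go.opentelemetry.io/collector/component"
)

func main() {
	// Hypothetical keys as they might appear under the receiver's `scrapers:` section.
	for _, keyStr := range []string{"cpu", "not-a-valid-key!"} {
		key, err := component.NewType(keyStr)
		if err != nil {
			// This is the case the new "invalid scraper key name" error path reports.
			fmt.Printf("rejected %q: %v\n", keyStr, err)
			continue
		}
		// A valid key becomes a component.Type usable as a map key into scraperFactories.
		fmt.Printf("accepted %q as component.Type %s\n", keyStr, key)
	}
}
```

A key such as `cpu` satisfies the component type naming rules, while a string containing characters like `-` or `!` does not, so malformed keys are now rejected before any factory lookup happens.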
diff --git a/receiver/hostmetricsreceiver/config_test.go b/receiver/hostmetricsreceiver/config_test.go
index 053c010eecc0..6546ff5dac06 100644
--- a/receiver/hostmetricsreceiver/config_test.go
+++ b/receiver/hostmetricsreceiver/config_test.go
@@ -44,8 +44,8 @@ func TestLoadConfig(t *testing.T) {
 			id: component.NewID(metadata.Type),
 			expected: func() component.Config {
 				cfg := createDefaultConfig().(*Config)
-				cfg.Scrapers = map[string]internal.Config{
-					cpuscraper.TypeStr: func() internal.Config {
+				cfg.Scrapers = map[component.Type]internal.Config{
+					cpuscraper.Type: func() internal.Config {
 						cfg := (&cpuscraper.Factory{}).CreateDefaultConfig()
 						cfg.SetEnvMap(common.EnvMap{})
 						return cfg
@@ -62,34 +62,34 @@ func TestLoadConfig(t *testing.T) {
 					CollectionInterval: 30 * time.Second,
 					InitialDelay:       time.Second,
 				},
-				Scrapers: map[string]internal.Config{
-					cpuscraper.TypeStr: func() internal.Config {
+				Scrapers: map[component.Type]internal.Config{
+					cpuscraper.Type: func() internal.Config {
 						cfg := (&cpuscraper.Factory{}).CreateDefaultConfig()
 						cfg.SetEnvMap(common.EnvMap{})
 						return cfg
 					}(),
-					diskscraper.TypeStr: func() internal.Config {
+					diskscraper.Type: func() internal.Config {
 						cfg := (&diskscraper.Factory{}).CreateDefaultConfig()
 						cfg.SetEnvMap(common.EnvMap{})
 						return cfg
 					}(),
-					loadscraper.TypeStr: (func() internal.Config {
+					loadscraper.Type: (func() internal.Config {
 						cfg := (&loadscraper.Factory{}).CreateDefaultConfig()
 						cfg.(*loadscraper.Config).CPUAverage = true
 						cfg.SetEnvMap(common.EnvMap{})
 						return cfg
 					})(),
-					filesystemscraper.TypeStr: func() internal.Config {
+					filesystemscraper.Type: func() internal.Config {
 						cfg := (&filesystemscraper.Factory{}).CreateDefaultConfig()
 						cfg.SetEnvMap(common.EnvMap{})
 						return cfg
 					}(),
-					memoryscraper.TypeStr: func() internal.Config {
+					memoryscraper.Type: func() internal.Config {
 						cfg := (&memoryscraper.Factory{}).CreateDefaultConfig()
 						cfg.SetEnvMap(common.EnvMap{})
 						return cfg
 					}(),
-					networkscraper.TypeStr: (func() internal.Config {
+					networkscraper.Type: (func() internal.Config {
 						cfg := (&networkscraper.Factory{}).CreateDefaultConfig()
 						cfg.(*networkscraper.Config).Include = networkscraper.MatchConfig{
 							Interfaces: []string{"test1"},
@@ -98,17 +98,17 @@ func TestLoadConfig(t *testing.T) {
 						cfg.SetEnvMap(common.EnvMap{})
 						return cfg
 					})(),
-					processesscraper.TypeStr: func() internal.Config {
+					processesscraper.Type: func() internal.Config {
 						cfg := (&processesscraper.Factory{}).CreateDefaultConfig()
 						cfg.SetEnvMap(common.EnvMap{})
 						return cfg
 					}(),
-					pagingscraper.TypeStr: func() internal.Config {
+					pagingscraper.Type: func() internal.Config {
 						cfg := (&pagingscraper.Factory{}).CreateDefaultConfig()
 						cfg.SetEnvMap(common.EnvMap{})
 						return cfg
 					}(),
-					processscraper.TypeStr: (func() internal.Config {
+					processscraper.Type: (func() internal.Config {
 						cfg := (&processscraper.Factory{}).CreateDefaultConfig()
 						cfg.(*processscraper.Config).Include = processscraper.MatchConfig{
 							Names: []string{"test2", "test3"},
@@ -117,7 +117,7 @@ func TestLoadConfig(t *testing.T) {
 						cfg.SetEnvMap(common.EnvMap{})
 						return cfg
 					})(),
-					systemscraper.TypeStr: (func() internal.Config {
+					systemscraper.Type: (func() internal.Config {
 						cfg := (&systemscraper.Factory{}).CreateDefaultConfig()
 						cfg.SetEnvMap(common.EnvMap{})
 						return cfg
diff --git a/receiver/hostmetricsreceiver/factory.go b/receiver/hostmetricsreceiver/factory.go
index 8273a0d49153..7d3cecae62f0 100644
--- a/receiver/hostmetricsreceiver/factory.go
+++ b/receiver/hostmetricsreceiver/factory.go
@@ -37,17 +37,17 @@ const (
 // This file implements Factory for HostMetrics receiver.
 
 var (
-	scraperFactories = map[string]internal.ScraperFactory{
-		cpuscraper.TypeStr:        &cpuscraper.Factory{},
-		diskscraper.TypeStr:       &diskscraper.Factory{},
-		loadscraper.TypeStr:       &loadscraper.Factory{},
-		filesystemscraper.TypeStr: &filesystemscraper.Factory{},
-		memoryscraper.TypeStr:     &memoryscraper.Factory{},
-		networkscraper.TypeStr:    &networkscraper.Factory{},
-		pagingscraper.TypeStr:     &pagingscraper.Factory{},
-		processesscraper.TypeStr:  &processesscraper.Factory{},
-		processscraper.TypeStr:    &processscraper.Factory{},
-		systemscraper.TypeStr:     &systemscraper.Factory{},
+	scraperFactories = map[component.Type]internal.ScraperFactory{
+		cpuscraper.Type:        &cpuscraper.Factory{},
+		diskscraper.Type:       &diskscraper.Factory{},
+		filesystemscraper.Type: &filesystemscraper.Factory{},
+		loadscraper.Type:       &loadscraper.Factory{},
+		memoryscraper.Type:     &memoryscraper.Factory{},
+		networkscraper.Type:    &networkscraper.Factory{},
+		pagingscraper.Type:     &pagingscraper.Factory{},
+		processesscraper.Type:  &processesscraper.Factory{},
+		processscraper.Type:    &processscraper.Factory{},
+		systemscraper.Type:     &systemscraper.Factory{},
 	}
 )
 
@@ -60,14 +60,6 @@ func NewFactory() receiver.Factory {
 		receiver.WithLogs(createLogsReceiver, metadata.LogsStability))
 }
 
-func getScraperFactory(key string) (internal.ScraperFactory, bool) {
-	if factory, ok := scraperFactories[key]; ok {
-		return factory, true
-	}
-
-	return nil, false
-}
-
 // createDefaultConfig creates the default configuration for receiver.
 func createDefaultConfig() component.Config {
 	return &Config{
@@ -115,7 +107,7 @@ func createAddScraperOptions(
 	ctx context.Context,
 	set receiver.Settings,
 	config *Config,
-	factories map[string]internal.ScraperFactory,
+	factories map[component.Type]internal.ScraperFactory,
 ) ([]scraperhelper.ScraperControllerOption, error) {
 	scraperControllerOptions := make([]scraperhelper.ScraperControllerOption, 0, len(config.Scrapers))
 
@@ -136,7 +128,7 @@ func createAddScraperOptions(
 	return scraperControllerOptions, nil
 }
 
-func createHostMetricsScraper(ctx context.Context, set receiver.Settings, key string, cfg internal.Config, factories map[string]internal.ScraperFactory) (s scraper.Metrics, ok bool, err error) {
+func createHostMetricsScraper(ctx context.Context, set receiver.Settings, key component.Type, cfg internal.Config, factories map[component.Type]internal.ScraperFactory) (s scraper.Metrics, ok bool, err error) {
 	factory := factories[key]
 	if factory == nil {
 		ok = false
diff --git a/receiver/hostmetricsreceiver/factory_test.go b/receiver/hostmetricsreceiver/factory_test.go
index c22d92d967ed..d9cd23736c4a 100644
--- a/receiver/hostmetricsreceiver/factory_test.go
+++ b/receiver/hostmetricsreceiver/factory_test.go
@@ -9,6 +9,7 @@ import (
 	"testing"
 
 	"github.com/stretchr/testify/assert"
+	"go.opentelemetry.io/collector/component"
 	"go.opentelemetry.io/collector/component/componenttest"
 	"go.opentelemetry.io/collector/consumer/consumertest"
 	"go.opentelemetry.io/collector/pipeline"
@@ -47,7 +48,7 @@ func TestCreateReceiver_ScraperKeyConfigError(t *testing.T) {
 	const errorKey string = "error"
 
 	factory := NewFactory()
-	cfg := &Config{Scrapers: map[string]internal.Config{errorKey: &mockConfig{}}}
+	cfg := &Config{Scrapers: map[component.Type]internal.Config{component.MustNewType(errorKey): &mockConfig{}}}
 
 	_, err := factory.CreateMetrics(context.Background(), creationSet, cfg, consumertest.NewNop())
 	assert.EqualError(t, err, fmt.Sprintf("host metrics scraper factory not found for key: %q", errorKey))
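With the `getScraperFactory` helper gone, callers index `scraperFactories` directly and use the comma-ok form of the map lookup (a missing entry simply yields `ok == false`). A minimal, self-contained sketch of that pattern follows; `fakeFactory` is a placeholder for `internal.ScraperFactory`, which lives in an internal package and is not importable here.

```go
package main

import (
	"fmt"

	"go.opentelemetry.io/collector/component"
)

// fakeFactory stands in for the receiver's internal.ScraperFactory interface.
type fakeFactory struct{ name string }

// registry mirrors the shape of scraperFactories after this change:
// a map keyed by component.Type rather than by plain strings.
var registry = map[component.Type]fakeFactory{
	component.MustNewType("cpu"):  {name: "cpu scraper factory"},
	component.MustNewType("disk"): {name: "disk scraper factory"},
}

func main() {
	for _, key := range []component.Type{component.MustNewType("cpu"), component.MustNewType("memory")} {
		// The comma-ok lookup replaces the removed getScraperFactory helper.
		if f, ok := registry[key]; ok {
			fmt.Printf("found %s for key %s\n", f.name, key)
		} else {
			fmt.Printf("no factory registered for key %s\n", key)
		}
	}
}
```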
diff --git a/receiver/hostmetricsreceiver/hostmetrics_receiver_test.go b/receiver/hostmetricsreceiver/hostmetrics_receiver_test.go
index e7d8d0b176af..02768160ff68 100644
--- a/receiver/hostmetricsreceiver/hostmetrics_receiver_test.go
+++ b/receiver/hostmetricsreceiver/hostmetrics_receiver_test.go
@@ -13,6 +13,7 @@ import (
 	"github.com/shirou/gopsutil/v4/common"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
+	"go.opentelemetry.io/collector/component"
 	"go.opentelemetry.io/collector/component/componenttest"
 	"go.opentelemetry.io/collector/consumer"
 	"go.opentelemetry.io/collector/consumer/consumertest"
@@ -71,19 +72,6 @@ var systemSpecificMetrics = map[string][]string{
 	"solaris": {"system.filesystem.inodes.usage", "system.paging.faults"},
 }
 
-var factories = map[string]internal.ScraperFactory{
-	cpuscraper.TypeStr:        &cpuscraper.Factory{},
-	diskscraper.TypeStr:       &diskscraper.Factory{},
-	filesystemscraper.TypeStr: &filesystemscraper.Factory{},
-	loadscraper.TypeStr:       &loadscraper.Factory{},
-	memoryscraper.TypeStr:     &memoryscraper.Factory{},
-	networkscraper.TypeStr:    &networkscraper.Factory{},
-	pagingscraper.TypeStr:     &pagingscraper.Factory{},
-	processesscraper.TypeStr:  &processesscraper.Factory{},
-	processscraper.TypeStr:    &processscraper.Factory{},
-	systemscraper.TypeStr:     &systemscraper.Factory{},
-}
-
 type testEnv struct {
 	env map[string]string
 }
@@ -101,38 +89,36 @@ func (e *testEnv) Set(k, v string) error {
 }
 
 func TestGatherMetrics_EndToEnd(t *testing.T) {
-	scraperFactories = factories
-
 	sink := new(consumertest.MetricsSink)
 
 	cfg := &Config{
 		ControllerConfig: scraperhelper.ControllerConfig{
 			CollectionInterval: 100 * time.Millisecond,
 		},
-		Scrapers: map[string]internal.Config{
-			cpuscraper.TypeStr:        scraperFactories[cpuscraper.TypeStr].CreateDefaultConfig(),
-			diskscraper.TypeStr:       scraperFactories[diskscraper.TypeStr].CreateDefaultConfig(),
-			filesystemscraper.TypeStr: (&filesystemscraper.Factory{}).CreateDefaultConfig(),
-			loadscraper.TypeStr:       scraperFactories[loadscraper.TypeStr].CreateDefaultConfig(),
-			memoryscraper.TypeStr:     scraperFactories[memoryscraper.TypeStr].CreateDefaultConfig(),
-			networkscraper.TypeStr:    scraperFactories[networkscraper.TypeStr].CreateDefaultConfig(),
-			pagingscraper.TypeStr:     scraperFactories[pagingscraper.TypeStr].CreateDefaultConfig(),
-			processesscraper.TypeStr:  scraperFactories[processesscraper.TypeStr].CreateDefaultConfig(),
+		Scrapers: map[component.Type]internal.Config{
+			cpuscraper.Type:        scraperFactories[cpuscraper.Type].CreateDefaultConfig(),
+			diskscraper.Type:       scraperFactories[diskscraper.Type].CreateDefaultConfig(),
+			filesystemscraper.Type: (&filesystemscraper.Factory{}).CreateDefaultConfig(),
+			loadscraper.Type:       scraperFactories[loadscraper.Type].CreateDefaultConfig(),
+			memoryscraper.Type:     scraperFactories[memoryscraper.Type].CreateDefaultConfig(),
+			networkscraper.Type:    scraperFactories[networkscraper.Type].CreateDefaultConfig(),
+			pagingscraper.Type:     scraperFactories[pagingscraper.Type].CreateDefaultConfig(),
+			processesscraper.Type:  scraperFactories[processesscraper.Type].CreateDefaultConfig(),
 		},
 	}
 
 	if runtime.GOOS == "linux" || runtime.GOOS == "windows" {
-		cfg.Scrapers[processscraper.TypeStr] = scraperFactories[processscraper.TypeStr].CreateDefaultConfig()
+		cfg.Scrapers[processscraper.Type] = scraperFactories[processscraper.Type].CreateDefaultConfig()
 	}
 
-	receiver, err := NewFactory().CreateMetrics(context.Background(), creationSet, cfg, sink)
+	recv, err := NewFactory().CreateMetrics(context.Background(), creationSet, cfg, sink)
 	require.NoError(t, err, "Failed to create metrics receiver: %v", err)
 
 	ctx, cancelFn := context.WithCancel(context.Background())
-	err = receiver.Start(ctx, componenttest.NewNopHost())
+	err = recv.Start(ctx, componenttest.NewNopHost())
 	require.NoError(t, err, "Failed to start metrics receiver: %v", err)
-	defer func() { assert.NoError(t, receiver.Shutdown(context.Background())) }()
+	defer func() { assert.NoError(t, recv.Shutdown(context.Background())) }()
 
 	// canceling the context provided to Start should not cancel any async processes initiated by the receiver
 	cancelFn()
@@ -213,7 +199,7 @@ func appendMapInto(m1 map[string]struct{}, m2 map[string]struct{}) {
 	}
 }
 
-const mockTypeStr = "mock"
+var mockType = component.MustNewType("mock")
 
 type mockConfig struct{}
 
@@ -230,13 +216,13 @@ func (m *errFactory) CreateMetricsScraper(context.Context, receiver.Settings, in
 
 func TestGatherMetrics_ScraperKeyConfigError(t *testing.T) {
 	tmp := scraperFactories
-	scraperFactories = map[string]internal.ScraperFactory{}
+	scraperFactories = map[component.Type]internal.ScraperFactory{}
 	defer func() {
 		scraperFactories = tmp
 	}()
 
 	sink := new(consumertest.MetricsSink)
-	cfg := &Config{Scrapers: map[string]internal.Config{"error": &mockConfig{}}}
+	cfg := &Config{Scrapers: map[component.Type]internal.Config{component.MustNewType("error"): &mockConfig{}}}
 	_, err := NewFactory().CreateMetrics(context.Background(), creationSet, cfg, sink)
 	require.Error(t, err)
 }
@@ -244,13 +230,13 @@ func TestGatherMetrics_ScraperKeyConfigError(t *testing.T) {
 func TestGatherMetrics_CreateMetricsScraperError(t *testing.T) {
 	mFactory := &errFactory{}
 	tmp := scraperFactories
-	scraperFactories = map[string]internal.ScraperFactory{mockTypeStr: mFactory}
+	scraperFactories = map[component.Type]internal.ScraperFactory{mockType: mFactory}
 	defer func() {
 		scraperFactories = tmp
 	}()
 
 	sink := new(consumertest.MetricsSink)
-	cfg := &Config{Scrapers: map[string]internal.Config{mockTypeStr: &mockConfig{}}}
+	cfg := &Config{Scrapers: map[component.Type]internal.Config{mockType: &mockConfig{}}}
 	_, err := NewFactory().CreateMetrics(context.Background(), creationSet, cfg, sink)
 	require.Error(t, err)
 }
@@ -276,8 +262,6 @@ func (s *notifyingSink) ConsumeMetrics(_ context.Context, md pmetric.Metrics) er
 }
 
 func benchmarkScrapeMetrics(b *testing.B, cfg *Config) {
-	scraperFactories = factories
-
 	sink := &notifyingSink{ch: make(chan int, 10)}
 	tickerCh := make(chan time.Time)
 
@@ -304,7 +288,7 @@ func benchmarkScrapeMetrics(b *testing.B, cfg *Config) {
 func Benchmark_ScrapeCpuMetrics(b *testing.B) {
 	cfg := &Config{
 		ControllerConfig: scraperhelper.NewDefaultControllerConfig(),
-		Scrapers:         map[string]internal.Config{cpuscraper.TypeStr: (&cpuscraper.Factory{}).CreateDefaultConfig()},
+		Scrapers:         map[component.Type]internal.Config{cpuscraper.Type: (&cpuscraper.Factory{}).CreateDefaultConfig()},
 	}
 
 	benchmarkScrapeMetrics(b, cfg)
@@ -313,7 +297,7 @@ func Benchmark_ScrapeCpuMetrics(b *testing.B) {
 func Benchmark_ScrapeDiskMetrics(b *testing.B) {
 	cfg := &Config{
 		ControllerConfig: scraperhelper.NewDefaultControllerConfig(),
-		Scrapers:         map[string]internal.Config{diskscraper.TypeStr: (&diskscraper.Factory{}).CreateDefaultConfig()},
+		Scrapers:         map[component.Type]internal.Config{diskscraper.Type: (&diskscraper.Factory{}).CreateDefaultConfig()},
 	}
 
 	benchmarkScrapeMetrics(b, cfg)
@@ -322,7 +306,7 @@ func Benchmark_ScrapeDiskMetrics(b *testing.B) {
 func Benchmark_ScrapeFileSystemMetrics(b *testing.B) {
 	cfg := &Config{
 		ControllerConfig: scraperhelper.NewDefaultControllerConfig(),
-		Scrapers:         map[string]internal.Config{filesystemscraper.TypeStr: (&filesystemscraper.Factory{}).CreateDefaultConfig()},
+		Scrapers:         map[component.Type]internal.Config{filesystemscraper.Type: (&filesystemscraper.Factory{}).CreateDefaultConfig()},
 	}
 
 	benchmarkScrapeMetrics(b, cfg)
@@ -331,7 +315,7 @@ func Benchmark_ScrapeFileSystemMetrics(b *testing.B) {
 func Benchmark_ScrapeLoadMetrics(b *testing.B) {
 	cfg := &Config{
 		ControllerConfig: scraperhelper.NewDefaultControllerConfig(),
-		Scrapers:         map[string]internal.Config{loadscraper.TypeStr: (&loadscraper.Factory{}).CreateDefaultConfig()},
+		Scrapers:         map[component.Type]internal.Config{loadscraper.Type: (&loadscraper.Factory{}).CreateDefaultConfig()},
 	}
 
 	benchmarkScrapeMetrics(b, cfg)
@@ -340,7 +324,7 @@ func Benchmark_ScrapeLoadMetrics(b *testing.B) {
 func Benchmark_ScrapeMemoryMetrics(b *testing.B) {
 	cfg := &Config{
 		ControllerConfig: scraperhelper.NewDefaultControllerConfig(),
-		Scrapers:         map[string]internal.Config{memoryscraper.TypeStr: (&memoryscraper.Factory{}).CreateDefaultConfig()},
+		Scrapers:         map[component.Type]internal.Config{memoryscraper.Type: (&memoryscraper.Factory{}).CreateDefaultConfig()},
 	}
 
 	benchmarkScrapeMetrics(b, cfg)
@@ -349,7 +333,7 @@ func Benchmark_ScrapeMemoryMetrics(b *testing.B) {
 func Benchmark_ScrapeNetworkMetrics(b *testing.B) {
 	cfg := &Config{
 		ControllerConfig: scraperhelper.NewDefaultControllerConfig(),
-		Scrapers:         map[string]internal.Config{networkscraper.TypeStr: (&networkscraper.Factory{}).CreateDefaultConfig()},
+		Scrapers:         map[component.Type]internal.Config{networkscraper.Type: (&networkscraper.Factory{}).CreateDefaultConfig()},
 	}
 
 	benchmarkScrapeMetrics(b, cfg)
@@ -358,7 +342,7 @@ func Benchmark_ScrapeNetworkMetrics(b *testing.B) {
 func Benchmark_ScrapeProcessesMetrics(b *testing.B) {
 	cfg := &Config{
 		ControllerConfig: scraperhelper.NewDefaultControllerConfig(),
-		Scrapers:         map[string]internal.Config{processesscraper.TypeStr: (&processesscraper.Factory{}).CreateDefaultConfig()},
+		Scrapers:         map[component.Type]internal.Config{processesscraper.Type: (&processesscraper.Factory{}).CreateDefaultConfig()},
 	}
 
 	benchmarkScrapeMetrics(b, cfg)
@@ -367,7 +351,7 @@ func Benchmark_ScrapeProcessesMetrics(b *testing.B) {
 func Benchmark_ScrapePagingMetrics(b *testing.B) {
 	cfg := &Config{
 		ControllerConfig: scraperhelper.NewDefaultControllerConfig(),
-		Scrapers:         map[string]internal.Config{pagingscraper.TypeStr: (&pagingscraper.Factory{}).CreateDefaultConfig()},
+		Scrapers:         map[component.Type]internal.Config{pagingscraper.Type: (&pagingscraper.Factory{}).CreateDefaultConfig()},
 	}
 
 	benchmarkScrapeMetrics(b, cfg)
@@ -380,7 +364,7 @@ func Benchmark_ScrapeProcessMetrics(b *testing.B) {
 
 	cfg := &Config{
 		ControllerConfig: scraperhelper.NewDefaultControllerConfig(),
-		Scrapers:         map[string]internal.Config{processscraper.TypeStr: (&processscraper.Factory{}).CreateDefaultConfig()},
+		Scrapers:         map[component.Type]internal.Config{processscraper.Type: (&processscraper.Factory{}).CreateDefaultConfig()},
 	}
 
 	benchmarkScrapeMetrics(b, cfg)
@@ -393,7 +377,7 @@ func Benchmark_ScrapeUptimeMetrics(b *testing.B) {
 
 	cfg := &Config{
 		ControllerConfig: scraperhelper.NewDefaultControllerConfig(),
-		Scrapers:         map[string]internal.Config{systemscraper.TypeStr: (&systemscraper.Factory{}).CreateDefaultConfig()},
+		Scrapers:         map[component.Type]internal.Config{systemscraper.Type: (&systemscraper.Factory{}).CreateDefaultConfig()},
 	}
 
 	benchmarkScrapeMetrics(b, cfg)
@@ -402,15 +386,15 @@ func Benchmark_ScrapeUptimeMetrics(b *testing.B) {
 func Benchmark_ScrapeSystemMetrics(b *testing.B) {
 	cfg := &Config{
 		ControllerConfig: scraperhelper.NewDefaultControllerConfig(),
-		Scrapers: map[string]internal.Config{
-			cpuscraper.TypeStr:        (&cpuscraper.Factory{}).CreateDefaultConfig(),
-			diskscraper.TypeStr:       (&diskscraper.Factory{}).CreateDefaultConfig(),
-			filesystemscraper.TypeStr: (&filesystemscraper.Factory{}).CreateDefaultConfig(),
-			loadscraper.TypeStr:       (&loadscraper.Factory{}).CreateDefaultConfig(),
-			memoryscraper.TypeStr:     (&memoryscraper.Factory{}).CreateDefaultConfig(),
-			networkscraper.TypeStr:    (&networkscraper.Factory{}).CreateDefaultConfig(),
-			pagingscraper.TypeStr:     (&pagingscraper.Factory{}).CreateDefaultConfig(),
-			processesscraper.TypeStr:  (&processesscraper.Factory{}).CreateDefaultConfig(),
+		Scrapers: map[component.Type]internal.Config{
+			cpuscraper.Type:        (&cpuscraper.Factory{}).CreateDefaultConfig(),
+			diskscraper.Type:       (&diskscraper.Factory{}).CreateDefaultConfig(),
+			filesystemscraper.Type: (&filesystemscraper.Factory{}).CreateDefaultConfig(),
+			loadscraper.Type:       (&loadscraper.Factory{}).CreateDefaultConfig(),
+			memoryscraper.Type:     (&memoryscraper.Factory{}).CreateDefaultConfig(),
+			networkscraper.Type:    (&networkscraper.Factory{}).CreateDefaultConfig(),
+			pagingscraper.Type:     (&pagingscraper.Factory{}).CreateDefaultConfig(),
+			processesscraper.Type:  (&processesscraper.Factory{}).CreateDefaultConfig(),
 		},
 	}
 
@@ -424,16 +408,16 @@ func Benchmark_ScrapeSystemAndProcessMetrics(b *testing.B) {
 
 	cfg := &Config{
 		ControllerConfig: scraperhelper.NewDefaultControllerConfig(),
-		Scrapers: map[string]internal.Config{
-			cpuscraper.TypeStr:        &cpuscraper.Config{},
-			diskscraper.TypeStr:       &diskscraper.Config{},
-			filesystemscraper.TypeStr: (&filesystemscraper.Factory{}).CreateDefaultConfig(),
-			loadscraper.TypeStr:       &loadscraper.Config{},
-			memoryscraper.TypeStr:     &memoryscraper.Config{},
-			networkscraper.TypeStr:    &networkscraper.Config{},
-			pagingscraper.TypeStr:     (&pagingscraper.Factory{}).CreateDefaultConfig(),
-			processesscraper.TypeStr:  &processesscraper.Config{},
-			systemscraper.TypeStr:     &systemscraper.Config{},
+		Scrapers: map[component.Type]internal.Config{
+			cpuscraper.Type:        &cpuscraper.Config{},
+			diskscraper.Type:       &diskscraper.Config{},
+			filesystemscraper.Type: (&filesystemscraper.Factory{}).CreateDefaultConfig(),
+			loadscraper.Type:       &loadscraper.Config{},
+			memoryscraper.Type:     &memoryscraper.Config{},
+			networkscraper.Type:    &networkscraper.Config{},
+			pagingscraper.Type:     (&pagingscraper.Factory{}).CreateDefaultConfig(),
+			processesscraper.Type:  &processesscraper.Config{},
+			systemscraper.Type:     &systemscraper.Config{},
 		},
 	}
 
diff --git a/receiver/hostmetricsreceiver/integration_test.go b/receiver/hostmetricsreceiver/integration_test.go
index 775e0cbb4910..ec4a2de201f1 100644
--- a/receiver/hostmetricsreceiver/integration_test.go
+++ b/receiver/hostmetricsreceiver/integration_test.go
@@ -41,8 +41,8 @@ func Test_ProcessScrape(t *testing.T) {
 				Config: filterset.Config{MatchType: filterset.Regexp},
 				Names:  []string{"sleep"},
 			}
-			rCfg.Scrapers = map[string]internal.Config{
-				"process": pCfg,
+			rCfg.Scrapers = map[component.Type]internal.Config{
+				processscraper.Type: pCfg,
 			}
 		}),
 		scraperinttest.WithExpectedFile(expectedFile),
@@ -73,8 +73,8 @@ func Test_ProcessScrapeWithCustomRootPath(t *testing.T) {
 			pCfg := (&processscraper.Factory{}).CreateDefaultConfig().(*processscraper.Config)
 			pCfg.SetRootPath(rootPath)
 			pCfg.SetEnvMap(setGoPsutilEnvVars(rootPath, &osEnv{}))
-			rCfg.Scrapers = map[string]internal.Config{
-				"process": pCfg,
+			rCfg.Scrapers = map[component.Type]internal.Config{
+				processscraper.Type: pCfg,
 			}
 		}),
 		scraperinttest.WithExpectedFile(expectedFile),
@@ -103,8 +103,8 @@ func Test_ProcessScrapeWithBadRootPathAndEnvVar(t *testing.T) {
 			pCfg := (&processscraper.Factory{}).CreateDefaultConfig().(*processscraper.Config)
 			pCfg.SetRootPath(badRootPath)
 			pCfg.SetEnvMap(setGoPsutilEnvVars(badRootPath, &osEnv{}))
-			rCfg.Scrapers = map[string]internal.Config{
-				"process": pCfg,
+			rCfg.Scrapers = map[component.Type]internal.Config{
+				processscraper.Type: pCfg,
 			}
 			rCfg.RootPath = badRootPath
 		}),
diff --git a/receiver/hostmetricsreceiver/internal/scraper/cpuscraper/factory.go b/receiver/hostmetricsreceiver/internal/scraper/cpuscraper/factory.go
index 129d822d12e8..f690f807f9d1 100644
--- a/receiver/hostmetricsreceiver/internal/scraper/cpuscraper/factory.go
+++ b/receiver/hostmetricsreceiver/internal/scraper/cpuscraper/factory.go
@@ -6,6 +6,7 @@ package cpuscraper // import "github.com/open-telemetry/opentelemetry-collector-
 import (
 	"context"
 
+	"go.opentelemetry.io/collector/component"
 	"go.opentelemetry.io/collector/receiver"
 	"go.opentelemetry.io/collector/scraper"
 
@@ -15,9 +16,9 @@ import (
 
 // This file implements Factory for CPU scraper.
 
-const (
-	// TypeStr the value of "type" key in configuration.
-	TypeStr = "cpu"
+var (
+	// Type the value of "type" key in configuration.
+	Type = component.MustNewType("cpu")
 )
 
 // Factory is the Factory for scraper.
diff --git a/receiver/hostmetricsreceiver/internal/scraper/diskscraper/factory.go b/receiver/hostmetricsreceiver/internal/scraper/diskscraper/factory.go
index d3c68d336b34..60079b438cc0 100644
--- a/receiver/hostmetricsreceiver/internal/scraper/diskscraper/factory.go
+++ b/receiver/hostmetricsreceiver/internal/scraper/diskscraper/factory.go
@@ -6,6 +6,7 @@ package diskscraper // import "github.com/open-telemetry/opentelemetry-collector
 import (
 	"context"
 
+	"go.opentelemetry.io/collector/component"
 	"go.opentelemetry.io/collector/receiver"
 	"go.opentelemetry.io/collector/scraper"
 
@@ -15,9 +16,9 @@ import (
 
 // This file implements Factory for Disk scraper.
 
-const (
-	// TypeStr the value of "type" key in configuration.
-	TypeStr = "disk"
+var (
+	// Type the value of "type" key in configuration.
+	Type = component.MustNewType("disk")
 )
 
 // Factory is the Factory for scraper.
diff --git a/receiver/hostmetricsreceiver/internal/scraper/filesystemscraper/factory.go b/receiver/hostmetricsreceiver/internal/scraper/filesystemscraper/factory.go
index 287cba25bf60..4ad026c913e0 100644
--- a/receiver/hostmetricsreceiver/internal/scraper/filesystemscraper/factory.go
+++ b/receiver/hostmetricsreceiver/internal/scraper/filesystemscraper/factory.go
@@ -7,6 +7,7 @@ import (
 	"context"
 	"os"
 
+	"go.opentelemetry.io/collector/component"
 	"go.opentelemetry.io/collector/receiver"
 	"go.opentelemetry.io/collector/scraper"
 
@@ -16,19 +17,14 @@ import (
 
 // This file implements Factory for FileSystem scraper.
 
-const (
-	// TypeStr the value of "type" key in configuration.
-	TypeStr = "filesystem"
+var (
+	// Type the value of "type" key in configuration.
+	Type = component.MustNewType("filesystem")
 )
 
 // Factory is the Factory for scraper.
 type Factory struct{}
 
-// Type gets the type of the scraper config created by this Factory.
-func (f *Factory) Type() string {
-	return TypeStr
-}
-
 // CreateDefaultConfig creates the default configuration for the Scraper.
 func (f *Factory) CreateDefaultConfig() internal.Config {
 	return &Config{
diff --git a/receiver/hostmetricsreceiver/internal/scraper/loadscraper/factory.go b/receiver/hostmetricsreceiver/internal/scraper/loadscraper/factory.go
index 1ee38bef7842..d2748eb6127e 100644
--- a/receiver/hostmetricsreceiver/internal/scraper/loadscraper/factory.go
+++ b/receiver/hostmetricsreceiver/internal/scraper/loadscraper/factory.go
@@ -6,6 +6,7 @@ package loadscraper // import "github.com/open-telemetry/opentelemetry-collector
 import (
 	"context"
 
+	"go.opentelemetry.io/collector/component"
 	"go.opentelemetry.io/collector/receiver"
 	"go.opentelemetry.io/collector/scraper"
 
@@ -15,9 +16,9 @@ import (
 
 // This file implements Factory for Load scraper.
 
-const (
-	// TypeStr the value of "type" key in configuration.
-	TypeStr = "load"
+var (
+	// Type the value of "type" key in configuration.
+	Type = component.MustNewType("load")
 )
 
 // Factory is the Factory for scraper.
diff --git a/receiver/hostmetricsreceiver/internal/scraper/memoryscraper/factory.go b/receiver/hostmetricsreceiver/internal/scraper/memoryscraper/factory.go
index 231fbf8afce6..1807938d3f87 100644
--- a/receiver/hostmetricsreceiver/internal/scraper/memoryscraper/factory.go
+++ b/receiver/hostmetricsreceiver/internal/scraper/memoryscraper/factory.go
@@ -6,6 +6,7 @@ package memoryscraper // import "github.com/open-telemetry/opentelemetry-collect
 import (
 	"context"
 
+	"go.opentelemetry.io/collector/component"
 	"go.opentelemetry.io/collector/receiver"
 	"go.opentelemetry.io/collector/scraper"
 
@@ -15,9 +16,9 @@ import (
 
 // This file implements Factory for Memory scraper.
 
-const (
-	// TypeStr the value of "type" key in configuration.
-	TypeStr = "memory"
+var (
+	// Type the value of "type" key in configuration.
+	Type = component.MustNewType("memory")
 )
 
 // Factory is the Factory for scraper.
diff --git a/receiver/hostmetricsreceiver/internal/scraper/networkscraper/factory.go b/receiver/hostmetricsreceiver/internal/scraper/networkscraper/factory.go
index 2f5b5b1d87f0..8bd0976d1519 100644
--- a/receiver/hostmetricsreceiver/internal/scraper/networkscraper/factory.go
+++ b/receiver/hostmetricsreceiver/internal/scraper/networkscraper/factory.go
@@ -6,6 +6,7 @@ package networkscraper // import "github.com/open-telemetry/opentelemetry-collec
 import (
 	"context"
 
+	"go.opentelemetry.io/collector/component"
 	"go.opentelemetry.io/collector/receiver"
 	"go.opentelemetry.io/collector/scraper"
 
@@ -15,9 +16,9 @@ import (
 
 // This file implements Factory for Network scraper.
 
-const (
-	// TypeStr the value of "type" key in configuration.
-	TypeStr = "network"
+var (
+	// Type the value of "type" key in configuration.
+	Type = component.MustNewType("network")
 )
 
 // Factory is the Factory for scraper.
diff --git a/receiver/hostmetricsreceiver/internal/scraper/pagingscraper/factory.go b/receiver/hostmetricsreceiver/internal/scraper/pagingscraper/factory.go
index f0f0ce18dbe3..2a1f293918ba 100644
--- a/receiver/hostmetricsreceiver/internal/scraper/pagingscraper/factory.go
+++ b/receiver/hostmetricsreceiver/internal/scraper/pagingscraper/factory.go
@@ -6,6 +6,7 @@ package pagingscraper // import "github.com/open-telemetry/opentelemetry-collect
 import (
 	"context"
 
+	"go.opentelemetry.io/collector/component"
 	"go.opentelemetry.io/collector/receiver"
 	"go.opentelemetry.io/collector/scraper"
 
@@ -15,9 +16,9 @@ import (
 
 // This file implements Factory for Paging scraper.
 
-const (
-	// TypeStr the value of "type" key in configuration.
- TypeStr = "paging" +var ( + // Type the value of "type" key in configuration. + Type = component.MustNewType("paging") ) // Factory is the Factory for scraper. diff --git a/receiver/hostmetricsreceiver/internal/scraper/processesscraper/factory.go b/receiver/hostmetricsreceiver/internal/scraper/processesscraper/factory.go index 6f7511b34ff5..e0c548eb85f7 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/processesscraper/factory.go +++ b/receiver/hostmetricsreceiver/internal/scraper/processesscraper/factory.go @@ -6,6 +6,7 @@ package processesscraper // import "github.com/open-telemetry/opentelemetry-coll import ( "context" + "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/receiver" "go.opentelemetry.io/collector/scraper" @@ -15,9 +16,9 @@ import ( // This file implements Factory for Processes scraper. -const ( - // TypeStr the value of "type" key in configuration. - TypeStr = "processes" +var ( + // Type the value of "type" key in configuration. + Type = component.MustNewType("processes") ) // Factory is the Factory for scraper. diff --git a/receiver/hostmetricsreceiver/internal/scraper/processscraper/factory.go b/receiver/hostmetricsreceiver/internal/scraper/processscraper/factory.go index 18b8dac96092..0ebda887fe77 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/processscraper/factory.go +++ b/receiver/hostmetricsreceiver/internal/scraper/processscraper/factory.go @@ -8,6 +8,7 @@ import ( "errors" "runtime" + "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/featuregate" "go.opentelemetry.io/collector/receiver" "go.opentelemetry.io/collector/scraper" @@ -18,9 +19,9 @@ import ( // This file implements Factory for Process scraper. -const ( - // TypeStr the value of "type" key in configuration. - TypeStr = "process" +var ( + // Type the value of "type" key in configuration. + Type = component.MustNewType("process") ) var ( diff --git a/receiver/hostmetricsreceiver/internal/scraper/systemscraper/factory.go b/receiver/hostmetricsreceiver/internal/scraper/systemscraper/factory.go index caf1a7f800db..ac4a0479a356 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/systemscraper/factory.go +++ b/receiver/hostmetricsreceiver/internal/scraper/systemscraper/factory.go @@ -8,6 +8,7 @@ import ( "errors" "runtime" + "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/receiver" "go.opentelemetry.io/collector/scraper" @@ -17,9 +18,9 @@ import ( // This file implements Factory for System scraper. -const ( - // TypeStr the value of "type" key in configuration. - TypeStr = "system" +var ( + // Type the value of "type" key in configuration. + Type = component.MustNewType("system") ) // Factory is the Factory for scraper.