diff --git a/README.md b/README.md
index 9367a0a..3490fbc 100644
--- a/README.md
+++ b/README.md
@@ -24,6 +24,7 @@ for creating an intuitive and interactive command-line interface.
 - [x] Allow to select a specific metric to inspect, and show its series.
 - [x] Metric search with fuzzy search.
 - [x] [HTTP configuration file](https://prometheus.io/docs/prometheus/latest/configuration/configuration/#http_config) to support custom HTTP client options like basic auth, custom headers, proxy configs, etc.
+- [x] Support reading metrics from local files, to help analyze metrics that have already been collected.
 
 ## Planned Features
 
diff --git a/cmd/cardinality.go b/cmd/cardinality.go
index 971f6a0..fae5090 100644
--- a/cmd/cardinality.go
+++ b/cmd/cardinality.go
@@ -397,9 +397,19 @@ func registerCardinalityCommand(app *extkingpin.App) {
 		_ bool,
 	) error {
 		scrapeURL := opts.ScrapeURL
+		scrapeFile := opts.ScrapeFile
 		timeoutDuration := opts.Timeout
 		httpConfigFile := opts.HttpConfigFile
 
+		if scrapeURL == "" && scrapeFile == "" {
+			return errors.New("No URL or file provided to scrape metrics. " +
+				"Please supply a target to scrape via `--scrape.url` or `--scrape.file` flags.")
+		}
+
+		if scrapeURL != "" && scrapeFile != "" {
+			return errors.New("The flags `--scrape.url` and `--scrape.file` are mutually exclusive.")
+		}
+
 		metricTable := newModel(nil, opts.OutputHeight, logger)
 		p := tea.NewProgram(metricTable)
 		metricTable.program = p
@@ -424,7 +434,8 @@
 
 		level.Info(logger).Log(
 			"msg", "scraping",
-			"url", scrapeURL,
+			"scrape_url", scrapeURL,
+			"scrape_file", scrapeFile,
 			"timeout", timeoutDuration,
 			"max_size", maxSize,
 			"http_config_file", httpConfigFile,
@@ -433,6 +444,7 @@
 		t0 := time.Now()
 		scraper := scrape.NewPromScraper(
 			scrapeURL,
+			scrapeFile,
 			logger,
 			scrape.WithTimeout(timeoutDuration),
 			scrape.WithMaxBodySize(maxSize),
diff --git a/cmd/options.go b/cmd/options.go
index cb29303..d4a3cc5 100644
--- a/cmd/options.go
+++ b/cmd/options.go
@@ -10,6 +10,7 @@ import (
 
 type Options struct {
 	ScrapeURL     string
+	ScrapeFile    string
 	OutputHeight  int
 	MaxScrapeSize string
 	Timeout       time.Duration
@@ -25,10 +26,14 @@ func (o *Options) MaxScrapeSizeBytes() (int64, error) {
 }
 
 func (o *Options) AddFlags(app extkingpin.AppClause) {
-	app.Flag("scrape-url", "URL to scrape metrics from").
-		Required().
+	app.Flag("scrape.url", "URL to scrape metrics from").
+		Default("").
 		StringVar(&o.ScrapeURL)
 
+	app.Flag("scrape.file", "File to scrape metrics from").
+		Default("").
+		StringVar(&o.ScrapeFile)
+
 	app.Flag("timeout", "Timeout for the scrape request").
 		Default("10s").
 		DurationVar(&o.Timeout)
diff --git a/pkg/scrape/scraper.go b/pkg/scrape/scraper.go
index 6602bfb..f841f12 100644
--- a/pkg/scrape/scraper.go
+++ b/pkg/scrape/scraper.go
@@ -5,6 +5,7 @@ import (
 	"fmt"
 	"io"
 	"net/http"
+	"os"
 	"regexp"
 	"strconv"
 	"strings"
@@ -23,6 +24,7 @@ import (
 type PromScraper struct {
 	httpConfigFile string
 	scrapeURL      string
+	scrapeFilePath string
 	timeout        time.Duration
 	logger         log.Logger
 	series         map[string]SeriesSet
@@ -56,7 +58,7 @@ func WithHttpConfigFile(file string) ScraperOption {
 	}
 }
 
-func NewPromScraper(scrapeURL string, logger log.Logger, opts ...ScraperOption) *PromScraper {
+func NewPromScraper(scrapeURL string, scrapeFile string, logger log.Logger, opts ...ScraperOption) *PromScraper {
 	scOpts := &scrapeOpts{
 		timeout:     10 * time.Second,
 		maxBodySize: 10 * 1024 * 1024,
@@ -69,6 +71,7 @@ func NewPromScraper(scrapeURL string, logger log.Logger, opts ...ScraperOption)
 
 	return &PromScraper{
 		scrapeURL:      scrapeURL,
+		scrapeFilePath: scrapeFile,
 		logger:         logger,
 		timeout:        scOpts.timeout,
 		maxBodySize:    scOpts.maxBodySize,
@@ -79,6 +82,58 @@ func NewPromScraper(scrapeURL string, logger log.Logger, opts ...ScraperOption)
 }
 
 func (ps *PromScraper) Scrape() (*Result, error) {
+	if ps.scrapeFilePath != "" {
+		return ps.scrapeFile()
+	}
+
+	return ps.scrapeHTTP()
+}
+
+func (ps *PromScraper) scrapeFile() (*Result, error) {
+	var (
+		seriesSet        map[string]SeriesSet
+		seriesScrapeText SeriesScrapeText
+	)
+
+	// Don't use os.ReadFile(); manually open the file so we can create an
+	// io.LimitReader from the file to enforce max body size.
+	f, err := os.Open(ps.scrapeFilePath)
+	if err != nil {
+		return &Result{}, fmt.Errorf("Failed to open file %s to scrape metrics: %w", ps.scrapeFilePath, err)
+	}
+	defer f.Close()
+
+	body, err := io.ReadAll(io.LimitReader(f, ps.maxBodySize))
+	if err != nil {
+		return &Result{}, fmt.Errorf("Failed reading file %s to scrape metrics: %w", ps.scrapeFilePath, err)
+	}
+
+	if int64(len(body)) >= ps.maxBodySize {
+		level.Warn(ps.logger).Log(
+			"msg", "metric file body size limit exceeded",
+			"limit_bytes", ps.maxBodySize,
+			"body_size", len(body),
+		)
+		return &Result{}, fmt.Errorf("metric file body size exceeded limit of %d bytes", ps.maxBodySize)
+	}
+
+	// assume that scraping metrics from a file implies they're in text format.
+	contentType := "text/plain"
+	ps.lastScrapeContentType = contentType
+	seriesSet, scrapeErr := ps.extractMetrics(body, contentType)
+	if scrapeErr != nil {
+		return &Result{}, fmt.Errorf("failed to extract metrics from file: %w", scrapeErr)
+	}
+	seriesScrapeText = ps.extractMetricSeriesText(body)
+
+	return &Result{
+		Series:           seriesSet,
+		UsedContentType:  contentType,
+		SeriesScrapeText: seriesScrapeText,
+	}, nil
+}
+
+func (ps *PromScraper) scrapeHTTP() (*Result, error) {
 	var (
 		seriesSet map[string]SeriesSet
 		scrapeErr error
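As a quick illustration (not part of the patch above), here is a minimal test sketch of how the new file-based scrape path could be exercised through the public API. The module import path and the go-kit logger package are assumptions; `NewPromScraper`, `WithTimeout`, `WithMaxBodySize`, `Scrape`, and the `Result` fields are taken from the diff.

```go
// Hypothetical sketch, not part of the patch: exercise the file-based scrape path.
package scrape_test

import (
	"os"
	"path/filepath"
	"testing"
	"time"

	"github.com/go-kit/log" // assumed: the logger implementation behind log.Logger / level.* in the diff

	"example.com/your/module/pkg/scrape" // placeholder import path; substitute the real module path
)

func TestScrapeFromLocalFile(t *testing.T) {
	// Write a small text-format metrics dump to a temporary file.
	dir := t.TempDir()
	path := filepath.Join(dir, "metrics.prom")
	data := "# TYPE http_requests_total counter\nhttp_requests_total{code=\"200\"} 42\n"
	if err := os.WriteFile(path, []byte(data), 0o644); err != nil {
		t.Fatal(err)
	}

	// An empty scrape URL plus a non-empty file path selects the scrapeFile() branch in Scrape().
	scraper := scrape.NewPromScraper(
		"",   // scrapeURL: unused when scraping from a file
		path, // scrapeFile: local metrics dump
		log.NewNopLogger(),
		scrape.WithTimeout(5*time.Second),
		scrape.WithMaxBodySize(1024*1024),
	)

	res, err := scraper.Scrape()
	if err != nil {
		t.Fatalf("scrape failed: %v", err)
	}
	if res.UsedContentType != "text/plain" {
		t.Fatalf("expected text/plain content type, got %q", res.UsedContentType)
	}
	if len(res.Series) == 0 {
		t.Fatal("expected at least one series parsed from the file")
	}
}
```

Passing an empty `scrapeURL` together with a non-empty `scrapeFile` mirrors the mutual-exclusion check added in `cmd/cardinality.go`.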