Skip to content

Commit

Permalink
Add counting stats to downloader.
Browse files Browse the repository at this point in the history
  • Loading branch information
s-l-teichmann committed Aug 25, 2023
1 parent 9e329bd commit c86656c
Show file tree
Hide file tree
Showing 2 changed files with 92 additions and 7 deletions.
40 changes: 33 additions & 7 deletions cmd/csaf_downloader/downloader.go
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,8 @@ type downloader struct {
validator csaf.RemoteValidator
forwarder *forwarder
mkdirMu sync.Mutex
statsMu sync.Mutex
stats stats
}

// failedValidationDir is the name of the sub folder
Expand Down Expand Up @@ -83,6 +85,13 @@ func (d *downloader) close() {
}
}

// addStats merges the counters collected by a single worker into the
// downloader's running totals. Safe for concurrent use: the totals are
// guarded by statsMu because several download workers finish in parallel.
func (d *downloader) addStats(delta *stats) {
	d.statsMu.Lock()
	defer d.statsMu.Unlock()
	d.stats.add(delta)
}

func (d *downloader) httpClient() util.Client {

hClient := http.Client{}
Expand Down Expand Up @@ -350,8 +359,12 @@ func (d *downloader) downloadWorker(
initialReleaseDate time.Time
dateExtract = util.TimeMatcher(&initialReleaseDate, time.RFC3339)
lower = strings.ToLower(string(label))
stats = stats{}
)

// Add collected stats back to total.
defer d.addStats(&stats)

nextAdvisory:
for {
var file csaf.AdvisoryFile
Expand All @@ -367,36 +380,40 @@ nextAdvisory:

u, err := url.Parse(file.URL())
if err != nil {
stats.downloadFailed++
slog.Warn("Ignoring invalid URL",
"url", file.URL(),
"error", err)
continue
}

if d.cfg.ignoreURL(file.URL()) {
if d.cfg.Verbose {
slog.Warn("Ignoring URL", "url", file.URL())
}
continue
}

// Ignore not conforming filenames.
filename := filepath.Base(u.Path)
if !util.ConformingFileName(filename) {
stats.filenameFailed++
slog.Warn("Ignoring none conforming filename",
"filename", filename)
continue
}

if d.cfg.ignoreURL(file.URL()) {
if d.cfg.Verbose {
slog.Warn("Ignoring URL", "url", file.URL())
}
continue
}

resp, err := client.Get(file.URL())
if err != nil {
stats.downloadFailed++
slog.Warn("Cannot GET",
"url", file.URL(),
"error", err)
continue
}

if resp.StatusCode != http.StatusOK {
stats.downloadFailed++
slog.Warn("Cannot load",
"url", file.URL(),
"status", resp.Status,
Expand Down Expand Up @@ -456,6 +473,7 @@ nextAdvisory:
tee := io.TeeReader(resp.Body, hasher)
return json.NewDecoder(tee).Decode(&doc)
}(); err != nil {
stats.downloadFailed++
slog.Warn("Downloading failed",
"url", file.URL(),
"error", err)
Expand All @@ -465,13 +483,15 @@ nextAdvisory:
// Compare the checksums.
s256Check := func() error {
if s256 != nil && !bytes.Equal(s256.Sum(nil), remoteSHA256) {
stats.sha256Failed++
return fmt.Errorf("SHA256 checksum of %s does not match", file.URL())
}
return nil
}

s512Check := func() error {
if s512 != nil && !bytes.Equal(s512.Sum(nil), remoteSHA512) {
stats.sha512Failed++
return fmt.Errorf("SHA512 checksum of %s does not match", file.URL())
}
return nil
Expand All @@ -495,6 +515,7 @@ nextAdvisory:
if sign != nil {
if err := d.checkSignature(data.Bytes(), sign); err != nil {
if !d.cfg.IgnoreSignatureCheck {
stats.signatureFailed++
return fmt.Errorf("cannot verify signature for %s: %v", file.URL(), err)
}
}
Expand All @@ -505,6 +526,7 @@ nextAdvisory:
// Validate against CSAF schema.
schemaCheck := func() error {
if errors, err := csaf.ValidateCSAF(doc); err != nil || len(errors) > 0 {
stats.schemaFailed++
d.logValidationIssues(file.URL(), errors, err)
return fmt.Errorf("schema validation for %q failed", file.URL())
}
Expand All @@ -514,6 +536,7 @@ nextAdvisory:
// Validate if filename is conforming.
filenameCheck := func() error {
if err := util.IDMatchesFilename(d.eval, doc, filename); err != nil {
stats.filenameFailed++
return fmt.Errorf("filename not conforming %s: %s", file.URL(), err)
}
return nil
Expand All @@ -532,6 +555,7 @@ nextAdvisory:
return nil
}
if !rvr.Valid {
stats.remoteFailed++
return fmt.Errorf("remote validation of %q failed", file.URL())
}
return nil
Expand Down Expand Up @@ -622,6 +646,7 @@ nextAdvisory:
}
}

stats.succeeded++
slog.Info("Written advisory", "path", path)
}
}
Expand Down Expand Up @@ -680,6 +705,7 @@ func loadHash(client util.Client, p string) ([]byte, []byte, error) {

// run performs the downloads for all the given domains.
func (d *downloader) run(ctx context.Context, domains []string) error {
defer d.stats.log()
for _, domain := range domains {
if err := d.download(ctx, domain); err != nil {
return err
Expand Down
59 changes: 59 additions & 0 deletions cmd/csaf_downloader/stats.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
// This file is Free Software under the MIT License
// without warranty, see README.md and LICENSES/MIT.txt for details.
//
// SPDX-License-Identifier: MIT
//
// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
// Software-Engineering: 2023 Intevation GmbH <https://intevation.de>

package main

import "log/slog"

// stats contains counters of the downloads.
// Each worker keeps a private instance and the totals are merged later,
// so the fields themselves need no synchronization.
type stats struct {
	downloadFailed  int // fetching the advisory (bad URL, GET error, non-200) failed
	filenameFailed  int // filename is not conforming or does not match the document ID
	schemaFailed    int // CSAF schema validation failed
	remoteFailed    int // remote validator rejected the document
	sha256Failed    int // SHA256 checksum mismatch
	sha512Failed    int // SHA512 checksum mismatch
	signatureFailed int // OpenPGP signature verification failed
	succeeded       int // advisory downloaded, validated and written successfully
}

// add accumulates the counters of o into st.
// The order of the additions is irrelevant; fields are independent.
func (st *stats) add(o *stats) {
	st.succeeded += o.succeeded
	st.downloadFailed += o.downloadFailed
	st.filenameFailed += o.filenameFailed
	st.schemaFailed += o.schemaFailed
	st.remoteFailed += o.remoteFailed
	st.sha256Failed += o.sha256Failed
	st.sha512Failed += o.sha512Failed
	st.signatureFailed += o.signatureFailed
}

// totalFailed returns the number of failed downloads summed over all
// failure categories. succeeded is deliberately not included.
func (st *stats) totalFailed() int {
	sum := st.downloadFailed
	sum += st.filenameFailed
	sum += st.schemaFailed
	sum += st.remoteFailed
	sum += st.sha256Failed
	sum += st.sha512Failed
	sum += st.signatureFailed
	return sum
}

// log logs the collected stats.
// Emitted once at the end of a run (see downloader.run); the key order
// below is the order the attributes appear in the log record, with the
// headline numbers (succeeded, total_failed) first.
func (st *stats) log() {
	slog.Info("Download statistics",
		"succeeded", st.succeeded,
		"total_failed", st.totalFailed(),
		"filename_failed", st.filenameFailed,
		"download_failed", st.downloadFailed,
		"schema_failed", st.schemaFailed,
		"remote_failed", st.remoteFailed,
		"sha256_failed", st.sha256Failed,
		"sha512_failed", st.sha512Failed,
		"signature_failed", st.signatureFailed)
}

0 comments on commit c86656c

Please sign in to comment.