diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml index ae6ad29408..6bb4f5fd2f 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/checks.yml @@ -16,10 +16,10 @@ name: Checks on: push: - branches: [main] + branches: [main, v2] pull_request: # The branches below must be a subset of the branches above - branches: [main] + branches: [main, v2] concurrency: # Pushing new changes to a branch will cancel any in-progress CI runs diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 4dc3aa8ea4..95fb0457d1 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -13,10 +13,10 @@ name: "CodeQL" on: push: - branches: [main] + branches: [main, v2] pull_request: # The branches below must be a subset of the branches above - branches: [main] + branches: [main, v2] # Restrict jobs in this workflow to have no permissions by default; permissions # should be granted per job as needed using a dedicated `permissions` block diff --git a/.github/workflows/osv-scanner-unified-action.yml b/.github/workflows/osv-scanner-unified-action.yml index 7ae55267a3..140e6feabb 100644 --- a/.github/workflows/osv-scanner-unified-action.yml +++ b/.github/workflows/osv-scanner-unified-action.yml @@ -16,11 +16,11 @@ name: OSV-Scanner Scheduled Scan on: pull_request: - branches: ["main"] + branches: ["main", "v2"] schedule: - cron: "12 12 * * 1" push: - branches: ["main"] + branches: ["main", "v2"] # Restrict jobs in this workflow to have no permissions by default; permissions # should be granted per job as needed using a dedicated `permissions` block diff --git a/.github/workflows/scorecards.yml b/.github/workflows/scorecards.yml index 7d4d6fa439..6a589fec35 100644 --- a/.github/workflows/scorecards.yml +++ b/.github/workflows/scorecards.yml @@ -12,7 +12,7 @@ on: schedule: - cron: "32 22 * * 6" push: - branches: ["main"] + branches: ["main", "v2"] # Restrict jobs in this workflow to have no permissions by default; permissions # should be granted per job as needed using a dedicated `permissions` block diff --git a/.prettierignore b/.prettierignore index d63dc2c013..70ca255b4c 100644 --- a/.prettierignore +++ b/.prettierignore @@ -1,4 +1,5 @@ **/fixtures/** +**/testdata/** **/fixtures-go/** /docs/vendor/** /internal/output/html/*template.html diff --git a/CHANGELOG.md b/CHANGELOG.md index 227e1f0991..832a18f53c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,48 @@ +OSV-Scanner v2 is coming soon! The next release will start with version `v2.0.0-alpha1`. + +Here's a peek at some of the exciting upcoming features: + +- Standalone container image scanning support. + - Including support for Alpine and Debian images. +- Refactored internals to use [`osv-scalibr`](https://github.com/google/osv-scalibr) library for better extraction capabilities. +- HTML output format for clearer vulnerability results. +- More control over output format and logging. +- ...and more! + +Importantly, the CLI interface of osv-scanner will be maintained with minimal breaking changes. +Most breaking changes will only be in the API. More details in the upcoming alpha release. + +--- + +This is the final feature v1 release of osv-scanner, future releases for v1 will only contain bug fixes. + +# v1.9.1 + +### Features: + +- [Feature #1295](https://github.com/google/osv-scanner/pull/1295) Support offline database in fix subcommand. 
+- [Feature #1342](https://github.com/google/osv-scanner/pull/1342) Add `--experimental-offline-vulnerabilities` and `--experimental-no-resolve` flags. +- [Feature #1045](https://github.com/google/osv-scanner/pull/1045) Support private registries for Maven. +- [Feature #1226](https://github.com/google/osv-scanner/pull/1226) Support support `vulnerabilities.ignore` in package overrides. + +### Fixes: + +- [Bug #604](https://github.com/google/osv-scanner/pull/604) Use correct path separator in SARIF output when on Windows. +- [Bug #330](https://github.com/google/osv-scanner/pull/330) Warn about and ignore duplicate entries in SBOMs. +- [Bug #1325](https://github.com/google/osv-scanner/pull/1325) Set CharsetReader and Entity when reading pom.xml. +- [Bug #1310](https://github.com/google/osv-scanner/pull/1310) Update spdx license ids. +- [Bug #1288](https://github.com/google/osv-scanner/pull/1288) Sort sbom packages by PURL. +- [Bug #1285](https://github.com/google/osv-scanner/pull/1285) Improve handling if `docker` exits with a non-zero code when trying to scan images + +### API Changes: + +- Deprecate auxillary public packages: As part of the V2 update described above, we have started deprecating some of the auxillary packages + which are not commonly used to give us more room to make better API designs. These include: + - `config` + - `depsdev` + - `grouper` + - `spdx` + # v1.9.0 ### Features: diff --git a/cmd/osv-scanner/__snapshots__/main_test.snap b/cmd/osv-scanner/__snapshots__/main_test.snap index e6b78a8e58..1d0d9c5178 100755 --- a/cmd/osv-scanner/__snapshots__/main_test.snap +++ b/cmd/osv-scanner/__snapshots__/main_test.snap @@ -80,7 +80,7 @@ Loaded filter from: /fixtures/locks-many/osv-scanner.toml "informationUri": "https://github.com/google/osv-scanner", "name": "osv-scanner", "rules": [], - "version": "1.9.0" + "version": "1.9.1" } }, "results": [] @@ -234,7 +234,7 @@ Loaded Alpine local db from /osv-scanner/Alpine/all.zip } } ], - "version": "1.9.0" + "version": "1.9.1" } }, "artifacts": [ @@ -349,9 +349,9 @@ overriding license for package Packagist/league/flysystem/1.0.8 with 0BSD | LICENSE VIOLATION | ECOSYSTEM | PACKAGE | VERSION | SOURCE | +-------------------+-----------+------------------------------------------------+---------+-------------------------------------------------------+ | 0BSD | Packagist | league/flysystem | 1.0.8 | fixtures/locks-insecure/composer.lock | -| UNKNOWN | | https://github.com/flutter/buildroot.git | | fixtures/locks-insecure/osv-scanner-flutter-deps.json | -| UNKNOWN | | https://github.com/brendan-duncan/archive.git | | fixtures/locks-insecure/osv-scanner-flutter-deps.json | | UNKNOWN | | https://chromium.googlesource.com/chromium/src | | fixtures/locks-insecure/osv-scanner-flutter-deps.json | +| UNKNOWN | | https://github.com/brendan-duncan/archive.git | | fixtures/locks-insecure/osv-scanner-flutter-deps.json | +| UNKNOWN | | https://github.com/flutter/buildroot.git | | fixtures/locks-insecure/osv-scanner-flutter-deps.json | | UNKNOWN | RubyGems | ast | 2.4.2 | fixtures/locks-many/Gemfile.lock | | 0BSD | Packagist | sentry/sdk | 2.0.4 | fixtures/locks-many/composer.lock | +-------------------+-----------+------------------------------------------------+---------+-------------------------------------------------------+ @@ -850,7 +850,7 @@ No issues found --- [TestRun/version - 1] -osv-scanner version: 1.9.0 +osv-scanner version: 1.9.1 commit: n/a built at: n/a @@ -908,6 +908,68 @@ Scanned /fixtures/call-analysis-go-project/go.mod file and found 
4 pack --- +[TestRun_Docker/Fake_alpine_image - 1] +Pulling docker image ("alpine:non-existent-tag")... + +--- + +[TestRun_Docker/Fake_alpine_image - 2] +Docker command exited with code ("/usr/bin/docker pull -q alpine:non-existent-tag"): 1 +STDERR: +> Error response from daemon: manifest for alpine:non-existent-tag not found: manifest unknown: manifest unknown +failed to run docker command + +--- + +[TestRun_Docker/Fake_image_entirely - 1] +Pulling docker image ("this-image-definitely-does-not-exist-abcde")... + +--- + +[TestRun_Docker/Fake_image_entirely - 2] +Docker command exited with code ("/usr/bin/docker pull -q this-image-definitely-does-not-exist-abcde"): 1 +STDERR: +> Error response from daemon: pull access denied for this-image-definitely-does-not-exist-abcde, repository does not exist or may require 'docker login': denied: requested access to the resource is denied +failed to run docker command + +--- + +[TestRun_Docker/Real_Alpine_image - 1] +Pulling docker image ("alpine:3.18.9")... +Saving docker image ("alpine:3.18.9") to temporary file... +Scanning image... +No issues found + +--- + +[TestRun_Docker/Real_Alpine_image - 2] + +--- + +[TestRun_Docker/Real_empty_image - 1] +Pulling docker image ("hello-world")... +Saving docker image ("hello-world") to temporary file... +Scanning image... + +--- + +[TestRun_Docker/Real_empty_image - 2] +No package sources found, --help for usage information. + +--- + +[TestRun_Docker/Real_empty_image_with_tag - 1] +Pulling docker image ("hello-world:linux")... +Saving docker image ("hello-world:linux") to temporary file... +Scanning image... + +--- + +[TestRun_Docker/Real_empty_image_with_tag - 2] +No package sources found, --help for usage information. + +--- + [TestRun_GithubActions/scanning_osv-scanner_custom_format - 1] Scanned /fixtures/locks-insecure/osv-scanner-flutter-deps.json file as a osv-scanner and found 3 packages +--------------------------------+------+-----------+----------------------------+----------------------------+-------------------------------------------------------+ @@ -973,7 +1035,7 @@ Scanned /fixtures/locks-insecure/osv-scanner-flutter-deps.json file as } } ], - "version": "1.9.0" + "version": "1.9.1" } }, "artifacts": [ @@ -1794,6 +1856,8 @@ Filtered 16 vulnerabilities from output | https://osv.dev/DLA-3325-1 | | Debian | openssl | 1.1.0l-1~deb9u5 | fixtures/sbom-insecure/postgres-stretch.cdx.xml | | https://osv.dev/DLA-3449-1 | | Debian | openssl | 1.1.0l-1~deb9u5 | fixtures/sbom-insecure/postgres-stretch.cdx.xml | | https://osv.dev/DLA-3530-1 | | Debian | openssl | 1.1.0l-1~deb9u5 | fixtures/sbom-insecure/postgres-stretch.cdx.xml | +| https://osv.dev/DLA-3942-1 | | Debian | openssl | 1.1.0l-1~deb9u5 | fixtures/sbom-insecure/postgres-stretch.cdx.xml | +| https://osv.dev/DLA-3942-2 | | Debian | openssl | 1.1.0l-1~deb9u5 | fixtures/sbom-insecure/postgres-stretch.cdx.xml | | https://osv.dev/DSA-4539-3 | | Debian | openssl | 1.1.0l-1~deb9u5 | fixtures/sbom-insecure/postgres-stretch.cdx.xml | | https://osv.dev/CVE-2017-12837 | 7.5 | Debian | perl | 5.24.1-3+deb9u7 | fixtures/sbom-insecure/postgres-stretch.cdx.xml | | https://osv.dev/CVE-2017-12883 | 9.1 | Debian | perl | 5.24.1-3+deb9u7 | fixtures/sbom-insecure/postgres-stretch.cdx.xml | @@ -1972,6 +2036,8 @@ Filtered 16 vulnerabilities from output | https://osv.dev/DLA-3325-1 | | Debian | openssl | 1.1.0l-1~deb9u5 | fixtures/sbom-insecure/postgres-stretch.cdx.xml | | https://osv.dev/DLA-3449-1 | | Debian | openssl | 1.1.0l-1~deb9u5 | 
fixtures/sbom-insecure/postgres-stretch.cdx.xml | | https://osv.dev/DLA-3530-1 | | Debian | openssl | 1.1.0l-1~deb9u5 | fixtures/sbom-insecure/postgres-stretch.cdx.xml | +| https://osv.dev/DLA-3942-1 | | Debian | openssl | 1.1.0l-1~deb9u5 | fixtures/sbom-insecure/postgres-stretch.cdx.xml | +| https://osv.dev/DLA-3942-2 | | Debian | openssl | 1.1.0l-1~deb9u5 | fixtures/sbom-insecure/postgres-stretch.cdx.xml | | https://osv.dev/DSA-4539-3 | | Debian | openssl | 1.1.0l-1~deb9u5 | fixtures/sbom-insecure/postgres-stretch.cdx.xml | | https://osv.dev/CVE-2017-12837 | 7.5 | Debian | perl | 5.24.1-3+deb9u7 | fixtures/sbom-insecure/postgres-stretch.cdx.xml | | https://osv.dev/CVE-2017-12883 | 9.1 | Debian | perl | 5.24.1-3+deb9u7 | fixtures/sbom-insecure/postgres-stretch.cdx.xml | @@ -2266,7 +2332,7 @@ No issues found --- [TestRun_LockfileWithExplicitParseAs/empty_works_as_an_escape_(no_fixture_because_it's_not_valid_on_Windows) - 2] -open /path/to/my:file: no such file or directory +stat /path/to/my:file: no such file or directory --- @@ -2275,7 +2341,7 @@ open /path/to/my:file: no such file or directory --- [TestRun_LockfileWithExplicitParseAs/empty_works_as_an_escape_(no_fixture_because_it's_not_valid_on_Windows)#01 - 2] -open /path/to/my:project/package-lock.json: no such file or directory +stat /path/to/my:project/package-lock.json: no such file or directory --- @@ -2284,7 +2350,7 @@ open /path/to/my:project/package-lock.json: no such file or directory --- [TestRun_LockfileWithExplicitParseAs/files_that_error_on_parsing_stop_parsable_files_from_being_checked - 2] -(extracting as Cargo.lock) could not extract from /fixtures/locks-insecure/my-package-lock.json: toml: line 1: expected '.' or '=', but got '{' instead +(extracting as rust/Cargolock) could not extract from /fixtures/locks-insecure/my-package-lock.json: toml: line 1: expected '.' 
or '=', but got '{' instead --- @@ -2342,7 +2408,7 @@ No issues found --- [TestRun_LockfileWithExplicitParseAs/parse-as_takes_priority,_even_if_it's_wrong - 2] -(extracting as package-lock.json) could not extract from /fixtures/locks-many/yarn.lock: invalid character '#' looking for beginning of value +(extracting as javascript/packagelockjson) could not extract from "/fixtures/locks-many/yarn.lock": invalid character '#' looking for beginning of value --- @@ -2372,6 +2438,17 @@ Scanned /fixtures/locks-insecure/composer.lock file and found 1 package --- +[TestRun_MavenTransitive/does_not_scan_transitive_dependencies_for_pom.xml_with_no-resolve - 1] +Scanning dir ./fixtures/maven-transitive/pom.xml +Scanned /fixtures/maven-transitive/pom.xml file and found 1 package +No issues found + +--- + +[TestRun_MavenTransitive/does_not_scan_transitive_dependencies_for_pom.xml_with_no-resolve - 2] + +--- + [TestRun_MavenTransitive/does_not_scan_transitive_dependencies_for_pom.xml_with_offline_mode - 1] Scanning dir ./fixtures/maven-transitive/pom.xml Scanned /fixtures/maven-transitive/pom.xml file and found 1 package @@ -2384,7 +2461,7 @@ No issues found --- -[TestRun_MavenTransitive/resolve_transitive_dependencies_with_native_datda_source - 1] +[TestRun_MavenTransitive/resolve_transitive_dependencies_with_native_data_source - 1] Scanned /fixtures/maven-transitive/registry.xml file as a pom.xml and found 59 packages +-------------------------------------+------+-----------+-----------------------------------------------+---------+----------------------------------------+ | OSV URL | CVSS | ECOSYSTEM | PACKAGE | VERSION | SOURCE | @@ -2398,7 +2475,7 @@ Scanned /fixtures/maven-transitive/registry.xml file as a pom.xml and f --- -[TestRun_MavenTransitive/resolve_transitive_dependencies_with_native_datda_source - 2] +[TestRun_MavenTransitive/resolve_transitive_dependencies_with_native_data_source - 2] --- @@ -2513,17 +2590,17 @@ Scanning image ../../internal/image/fixtures/test-node_modules-npm-empty.tar [TestRun_OCIImage/scanning_node_modules_using_npm_with_some_packages - 1] Scanning image ../../internal/image/fixtures/test-node_modules-npm-full.tar -+-------------------------------------+------+--------------+----------+------------+-------------------------------------------------------------------------------------------------------+ -| OSV URL | CVSS | ECOSYSTEM | PACKAGE | VERSION | SOURCE | -+-------------------------------------+------+--------------+----------+------------+-------------------------------------------------------------------------------------------------------+ -| https://osv.dev/CVE-2023-42363 | 5.5 | Alpine:v3.19 | busybox | 1.36.1-r15 | ../../internal/image/fixtures/test-node_modules-npm-full.tar:/lib/apk/db/installed | -| https://osv.dev/CVE-2023-42364 | 5.5 | Alpine:v3.19 | busybox | 1.36.1-r15 | ../../internal/image/fixtures/test-node_modules-npm-full.tar:/lib/apk/db/installed | -| https://osv.dev/CVE-2023-42365 | 5.5 | Alpine:v3.19 | busybox | 1.36.1-r15 | ../../internal/image/fixtures/test-node_modules-npm-full.tar:/lib/apk/db/installed | -| https://osv.dev/CVE-2023-42366 | 5.5 | Alpine:v3.19 | busybox | 1.36.1-r15 | ../../internal/image/fixtures/test-node_modules-npm-full.tar:/lib/apk/db/installed | -| https://osv.dev/GHSA-38f5-ghc2-fcmv | 9.8 | npm | cryo | 0.0.6 | ../../internal/image/fixtures/test-node_modules-npm-full.tar:/usr/app/node_modules/.package-lock.json | -| https://osv.dev/GHSA-vh95-rmgr-6w4m | 9.8 | npm | minimist | 0.0.8 | 
../../internal/image/fixtures/test-node_modules-npm-full.tar:/usr/app/node_modules/.package-lock.json | -| https://osv.dev/GHSA-xvch-5gv4-984h | | | | | | -+-------------------------------------+------+--------------+----------+------------+-------------------------------------------------------------------------------------------------------+ ++-------------------------------------+------+--------------+----------+------------+--------------------------------------------------------------------------------------------------------+ +| OSV URL | CVSS | ECOSYSTEM | PACKAGE | VERSION | SOURCE | ++-------------------------------------+------+--------------+----------+------------+--------------------------------------------------------------------------------------------------------+ +| https://osv.dev/CVE-2023-42363 | 5.5 | Alpine:v3.19 | busybox | 1.36.1-r15 | ../../internal/image/fixtures/test-node_modules-npm-full.tar:/lib/apk/db/installed | +| https://osv.dev/CVE-2023-42364 | 5.5 | Alpine:v3.19 | busybox | 1.36.1-r15 | ../../internal/image/fixtures/test-node_modules-npm-full.tar:/lib/apk/db/installed | +| https://osv.dev/CVE-2023-42365 | 5.5 | Alpine:v3.19 | busybox | 1.36.1-r15 | ../../internal/image/fixtures/test-node_modules-npm-full.tar:/lib/apk/db/installed | +| https://osv.dev/CVE-2023-42366 | 5.5 | Alpine:v3.19 | busybox | 1.36.1-r15 | ../../internal/image/fixtures/test-node_modules-npm-full.tar:/lib/apk/db/installed | +| https://osv.dev/GHSA-38f5-ghc2-fcmv | 9.8 | npm | cryo | 0.0.6 | ../../internal/image/fixtures/test-node_modules-npm-full.tar:/prod/app/node_modules/.package-lock.json | +| https://osv.dev/GHSA-vh95-rmgr-6w4m | 9.8 | npm | minimist | 0.0.8 | ../../internal/image/fixtures/test-node_modules-npm-full.tar:/prod/app/node_modules/.package-lock.json | +| https://osv.dev/GHSA-xvch-5gv4-984h | | | | | | ++-------------------------------------+------+--------------+----------+------------+--------------------------------------------------------------------------------------------------------+ --- diff --git a/cmd/osv-scanner/__snapshots__/update_test.snap b/cmd/osv-scanner/__snapshots__/update_test.snap index 9a1113cdc5..64b84f6318 100755 --- a/cmd/osv-scanner/__snapshots__/update_test.snap +++ b/cmd/osv-scanner/__snapshots__/update_test.snap @@ -34,7 +34,7 @@ Warning: `update` exists as both a subcommand of OSV-Scanner and as a file on th com.fasterxml.jackson.core jackson-core - 2.18.0 + 2.18.1 junit diff --git a/cmd/osv-scanner/fix/main.go b/cmd/osv-scanner/fix/main.go index cac639a4e2..2abfa05795 100644 --- a/cmd/osv-scanner/fix/main.go +++ b/cmd/osv-scanner/fix/main.go @@ -171,8 +171,9 @@ func Command(stdout, stderr io.Writer, r *reporter.Reporter) *cli.Command { }, // Offline database flags, copied from osv-scanner scan &cli.BoolFlag{ - Name: "experimental-offline", - Usage: "checks for vulnerabilities using local databases that are already cached", + Name: "experimental-offline-vulnerabilities", + Aliases: []string{"experimental-offline"}, + Usage: "checks for vulnerabilities using local databases that are already cached", }, &cli.BoolFlag{ Name: "experimental-download-offline-databases", @@ -327,7 +328,7 @@ func action(ctx *cli.Context, stdout, stderr io.Writer) (reporter.Reporter, erro } } - if ctx.Bool("experimental-offline") { + if ctx.Bool("experimental-offline-vulnerabilities") { var err error opts.Client.VulnerabilityClient, err = client.NewOSVOfflineClient( r, diff --git a/cmd/osv-scanner/fixtures/locks-requirements/my-requirements.txt 
b/cmd/osv-scanner/fixtures/locks-requirements/my-requirements.txt index 7e1060246f..0e463a4d02 100644 --- a/cmd/osv-scanner/fixtures/locks-requirements/my-requirements.txt +++ b/cmd/osv-scanner/fixtures/locks-requirements/my-requirements.txt @@ -1 +1 @@ -flask +flask==1.0.0 diff --git a/cmd/osv-scanner/fixtures/locks-requirements/requirements-dev.txt b/cmd/osv-scanner/fixtures/locks-requirements/requirements-dev.txt index 7e66a17d49..4fae28300e 100644 --- a/cmd/osv-scanner/fixtures/locks-requirements/requirements-dev.txt +++ b/cmd/osv-scanner/fixtures/locks-requirements/requirements-dev.txt @@ -1 +1 @@ -black +black==1.0.0 diff --git a/cmd/osv-scanner/fixtures/locks-requirements/requirements.txt b/cmd/osv-scanner/fixtures/locks-requirements/requirements.txt index d0dae5a60f..911f55bcf9 100644 --- a/cmd/osv-scanner/fixtures/locks-requirements/requirements.txt +++ b/cmd/osv-scanner/fixtures/locks-requirements/requirements.txt @@ -1,3 +1,3 @@ -flask -flask-cors +flask==1.0.0 +flask-cors==1.0.0 pandas==0.23.4 diff --git a/cmd/osv-scanner/fixtures/locks-requirements/the_requirements_for_test.txt b/cmd/osv-scanner/fixtures/locks-requirements/the_requirements_for_test.txt index e079f8a603..35663c020e 100644 --- a/cmd/osv-scanner/fixtures/locks-requirements/the_requirements_for_test.txt +++ b/cmd/osv-scanner/fixtures/locks-requirements/the_requirements_for_test.txt @@ -1 +1 @@ -pytest +pytest==1.0.0 diff --git a/cmd/osv-scanner/fixtures/sbom-insecure/osv-scanner.toml b/cmd/osv-scanner/fixtures/sbom-insecure/osv-scanner.toml index 80e5b8b2ca..4a3e9070b8 100644 --- a/cmd/osv-scanner/fixtures/sbom-insecure/osv-scanner.toml +++ b/cmd/osv-scanner/fixtures/sbom-insecure/osv-scanner.toml @@ -1,64 +1,3 @@ -[[IgnoredVulns]] -id = "GO-2022-0274" -# ignoreUntil = n/a -reason = "This is an intentionally vulnerable test sbom" - -[[IgnoredVulns]] -id = "GO-2022-0493" -# ignoreUntil = n/a -reason = "This is an intentionally vulnerable test sbom" - -[[IgnoredVulns]] -id = "GHSA-vpvm-3wq2-2wvm" -# ignoreUntil = n/a -reason = "This is an intentionally vulnerable test sbom" - -[[IgnoredVulns]] -id = "GHSA-m8cg-xc2p-r3fc" -# ignoreUntil = n/a -reason = "This is an intentionally vulnerable test sbom" - -[[IgnoredVulns]] -id = "GHSA-g2j6-57v7-gm8c" -# ignoreUntil = n/a -reason = "This is an intentionally vulnerable test sbom" - -[[IgnoredVulns]] -id = "GHSA-f3fp-gc8g-vw66" -# ignoreUntil = n/a -reason = "This is an intentionally vulnerable test sbom" - -[[IgnoredVulns]] -id = "DLA-3008-1" -# ignoreUntil = n/a -reason = "This is an intentionally vulnerable test sbom" - -[[IgnoredVulns]] -id = "DLA-3012-1" -# ignoreUntil = n/a -reason = "This is an intentionally vulnerable test sbom" - -[[IgnoredVulns]] -id = "DLA-3022-1" -# ignoreUntil = n/a -reason = "This is an intentionally vulnerable test sbom" - -[[IgnoredVulns]] -id = "DLA-3051-1" -# ignoreUntil = n/a -reason = "This is an intentionally vulnerable test sbom" - -[[IgnoredVulns]] -id = "CVE-2022-37434" -# ignoreUntil = n/a -reason = "This is an intentionally vulnerable test sbom" - -[[IgnoredVulns]] -id = "CVE-2018-25032" -# ignoreUntil = n/a -reason = "This is an intentionally vulnerable test sbom" - -[[IgnoredVulns]] -id = "GHSA-xr7r-f8xq-vfvv" -# ignoreUntil = n/a +[[PackageOverrides]] +ignore = true reason = "This is an intentionally vulnerable test sbom" diff --git a/cmd/osv-scanner/main.go b/cmd/osv-scanner/main.go index 595b1afe49..3c891f338c 100644 --- a/cmd/osv-scanner/main.go +++ b/cmd/osv-scanner/main.go @@ -47,6 +47,18 @@ func run(args []string, stdout, 
stderr io.Writer) int { }, } + // If ExitErrHandler is not set, cli will use the default cli.HandleExitCoder. + // This is not ideal as cli.HandleExitCoder checks if the error implements cli.ExitCode interface. + // + // 99% of the time, this is fine, as we do not implement cli.ExitCode in our errors, so errors pass through + // that handler untouched. + // However, because of Go's duck typing, any error that happens to have a ExitCode() function + // (e.g. *exec.ExitError) will be assumed to implement cli.ExitCode interface and cause the program to exit + // early without proper error handling. + // + // This removes the handler entirely so that behavior will not unexpectedly happen. + app.ExitErrHandler = func(_ *cli.Context, _ error) {} + args = insertDefaultCommand(args, app.Commands, app.DefaultCommand, stdout, stderr) if err := app.Run(args); err != nil { diff --git a/cmd/osv-scanner/main_test.go b/cmd/osv-scanner/main_test.go index 5d31c1fde3..26466c7486 100644 --- a/cmd/osv-scanner/main_test.go +++ b/cmd/osv-scanner/main_test.go @@ -7,6 +7,7 @@ import ( "os" "path/filepath" "reflect" + "runtime" "strings" "testing" @@ -517,7 +518,12 @@ func TestRun_LockfileWithExplicitParseAs(t *testing.T) { t.Run(tt.name, func(t *testing.T) { t.Parallel() - testCli(t, tt) + stdout, stderr := runCli(t, tt) + + testutility.NewSnapshot().MatchText(t, stdout) + testutility.NewSnapshot().WithWindowsReplacements(map[string]string{ + "CreateFile": "stat", + }).MatchText(t, stderr) }) } } @@ -728,6 +734,51 @@ func TestRun_Licenses(t *testing.T) { } } +func TestRun_Docker(t *testing.T) { + t.Parallel() + + testutility.SkipIfNotAcceptanceTesting(t, "Takes a long time to pull down images") + + tests := []cliTestCase{ + { + name: "Fake alpine image", + args: []string{"", "--docker", "alpine:non-existent-tag"}, + exit: 127, + }, + { + name: "Fake image entirely", + args: []string{"", "--docker", "this-image-definitely-does-not-exist-abcde"}, + exit: 127, + }, + // TODO: How to prevent these snapshots from changing constantly + { + name: "Real empty image", + args: []string{"", "--docker", "hello-world"}, + exit: 128, // No packages found + }, + { + name: "Real empty image with tag", + args: []string{"", "--docker", "hello-world:linux"}, + exit: 128, // No package found + }, + { + name: "Real Alpine image", + args: []string{"", "--docker", "alpine:3.18.9"}, + exit: 0, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + // Only test on linux, and mac/windows CI/CD does not come with docker preinstalled + if runtime.GOOS == "linux" { + testCli(t, tt) + } + }) + } +} + func TestRun_OCIImage(t *testing.T) { t.Parallel() @@ -911,13 +962,19 @@ func TestRun_MavenTransitive(t *testing.T) { args: []string{"", "--config=./fixtures/osv-scanner-empty-config.toml", "--experimental-offline", "--experimental-download-offline-databases", "./fixtures/maven-transitive/pom.xml"}, exit: 0, }, + { + // Direct dependencies do not have any vulnerability. 
+ name: "does not scan transitive dependencies for pom.xml with no-resolve", + args: []string{"", "--config=./fixtures/osv-scanner-empty-config.toml", "--experimental-no-resolve", "./fixtures/maven-transitive/pom.xml"}, + exit: 0, + }, { name: "scans dependencies from multiple registries", args: []string{"", "--config=./fixtures/osv-scanner-empty-config.toml", "-L", "pom.xml:./fixtures/maven-transitive/registry.xml"}, exit: 1, }, { - name: "resolve transitive dependencies with native datda source", + name: "resolve transitive dependencies with native data source", args: []string{"", "--config=./fixtures/osv-scanner-empty-config.toml", "--experimental-resolution-data-source=native", "-L", "pom.xml:./fixtures/maven-transitive/registry.xml"}, exit: 1, }, diff --git a/cmd/osv-scanner/scan/main.go b/cmd/osv-scanner/scan/main.go index ad44158e02..0e455b886d 100644 --- a/cmd/osv-scanner/scan/main.go +++ b/cmd/osv-scanner/scan/main.go @@ -16,16 +16,25 @@ import ( "github.com/urfave/cli/v2" ) +// flags that require network access and values to disable them. +var offlineFlags = map[string]string{ + "skip-git": "true", + "experimental-offline-vulnerabilities": "true", + "experimental-no-resolve": "true", + "experimental-licenses-summary": "false", + // "experimental-licenses": "", // StringSliceFlag has to be manually cleared. +} + func Command(stdout, stderr io.Writer, r *reporter.Reporter) *cli.Command { return &cli.Command{ Name: "scan", Usage: "scans various mediums for dependencies and matches it against the OSV database", Description: "scans various mediums for dependencies and matches it against the OSV database", Flags: []cli.Flag{ - &cli.StringSliceFlag{ + &cli.StringFlag{ Name: "docker", Aliases: []string{"D"}, - Usage: "scan docker image with this name. Warning: Only run this on a trusted container image, as it runs the container image to retrieve the package versions", + Usage: "scan docker image with this name. This is a convenience function which runs `docker save` before scanning the saved image using --oci-image", TakesFile: false, }, &cli.StringSliceFlag{ @@ -109,6 +118,24 @@ func Command(stdout, stderr io.Writer, r *reporter.Reporter) *cli.Command { }, &cli.BoolFlag{ Name: "experimental-offline", + Usage: "run in offline mode, disabling any features requiring network access", + Action: func(ctx *cli.Context, b bool) error { + if !b { + return nil + } + // Disable the features requiring network access. + for flag, value := range offlineFlags { + // TODO(michaelkedar): do something if the flag was already explicitly set. 
+ if err := ctx.Set(flag, value); err != nil { + panic(fmt.Sprintf("failed setting offline flag %s to %s: %v", flag, value, err)) + } + } + + return nil + }, + }, + &cli.BoolFlag{ + Name: "experimental-offline-vulnerabilities", Usage: "checks for vulnerabilities using local databases that are already cached", }, &cli.BoolFlag{ @@ -138,6 +165,10 @@ func Command(stdout, stderr io.Writer, r *reporter.Reporter) *cli.Command { TakesFile: true, Hidden: true, }, + &cli.BoolFlag{ + Name: "experimental-no-resolve", + Usage: "disable transitive dependency resolution of manifest files", + }, &cli.StringFlag{ Name: "experimental-resolution-data-source", Usage: "source to fetch package information from; value can be: deps.dev, native", @@ -221,20 +252,25 @@ func action(context *cli.Context, stdout, stderr io.Writer) (reporter.Reporter, callAnalysisStates = createCallAnalysisStates(context.StringSlice("call-analysis"), context.StringSlice("no-call-analysis")) } + scanLicensesAllowlist := context.StringSlice("experimental-licenses") + if context.Bool("experimental-offline") { + scanLicensesAllowlist = []string{} + } + vulnResult, err := osvscanner.DoScan(osvscanner.ScannerActions{ - LockfilePaths: context.StringSlice("lockfile"), - SBOMPaths: context.StringSlice("sbom"), - DockerContainerNames: context.StringSlice("docker"), - Recursive: context.Bool("recursive"), - SkipGit: context.Bool("skip-git"), - NoIgnore: context.Bool("no-ignore"), - ConfigOverridePath: context.String("config"), - DirectoryPaths: context.Args().Slice(), - CallAnalysisStates: callAnalysisStates, + LockfilePaths: context.StringSlice("lockfile"), + SBOMPaths: context.StringSlice("sbom"), + DockerImageName: context.String("docker"), + Recursive: context.Bool("recursive"), + SkipGit: context.Bool("skip-git"), + NoIgnore: context.Bool("no-ignore"), + ConfigOverridePath: context.String("config"), + DirectoryPaths: context.Args().Slice(), + CallAnalysisStates: callAnalysisStates, ExperimentalScannerActions: osvscanner.ExperimentalScannerActions{ LocalDBPath: context.String("experimental-local-db-path"), DownloadDatabases: context.Bool("experimental-download-offline-databases"), - CompareOffline: context.Bool("experimental-offline"), + CompareOffline: context.Bool("experimental-offline-vulnerabilities"), // License summary mode causes all // packages to appear in the json as // every package has a license - even @@ -242,9 +278,10 @@ func action(context *cli.Context, stdout, stderr io.Writer) (reporter.Reporter, ShowAllPackages: context.Bool("experimental-all-packages") || context.Bool("experimental-licenses-summary"), ScanLicensesSummary: context.Bool("experimental-licenses-summary"), - ScanLicensesAllowlist: context.StringSlice("experimental-licenses"), + ScanLicensesAllowlist: scanLicensesAllowlist, ScanOCIImage: context.String("experimental-oci-image"), TransitiveScanningActions: osvscanner.TransitiveScanningActions{ + Disabled: context.Bool("experimental-no-resolve"), NativeDataSource: context.String("experimental-resolution-data-source") == "native", MavenRegistry: context.String("experimental-maven-registry"), }, diff --git a/docs/Gemfile.lock b/docs/Gemfile.lock index cf94c4dad3..3c90efba1e 100644 --- a/docs/Gemfile.lock +++ b/docs/Gemfile.lock @@ -242,7 +242,7 @@ GEM rb-fsevent (0.11.2) rb-inotify (0.11.1) ffi (~> 1.0) - rexml (3.3.8) + rexml (3.3.9) rouge (3.30.0) rubyzip (2.3.2) safe_yaml (1.0.5) diff --git a/docs/github-action.md b/docs/github-action.md index 3460e43aba..c28acdffd0 100644 --- a/docs/github-action.md +++ 
b/docs/github-action.md @@ -55,7 +55,7 @@ permissions: jobs: scan-pr: - uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@v1.9.0" + uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@v1.9.1" ``` ### View results @@ -98,7 +98,7 @@ permissions: jobs: scan-scheduled: - uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@v1.9.0" + uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@v1.9.1" ``` As written, the scanner will run on 12:30 pm UTC every Monday, and also on every push to the main branch. You can change the schedule by following the instructions [here](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#schedule). @@ -133,7 +133,7 @@ permissions: jobs: osv-scan: - uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@v1.9.0" + uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@v1.9.1" with: # Only scan the top level go.mod file without recursively scanning directories since # this is pipeline is about releasing the go module and binary @@ -163,7 +163,7 @@ Results may be viewed by clicking on the details of the failed release action fr The GitHub Actions have the following optional inputs: -- `scan-args`: This value is passed to `osv-scanner` CLI after being split by each line. See the [usage](./usage) page for the available options. The `--format` and `--output` flags are already set by the reusable workflow and should not be overridden here. +- `scan-args`: This value is passed to `osv-scanner` CLI after being split by each line. See the [usage](./usage.md) page for the available options. The `--format` and `--output` flags are already set by the reusable workflow and should not be overridden here. Default: ```bash --recursive # Recursively scan subdirectories @@ -172,7 +172,7 @@ The GitHub Actions have the following optional inputs: ``` - `results-file-name`: This is the name of the final SARIF file uploaded to Github. Default: `results.sarif` -- `download-artifact`: Optional artifact to download for scanning. Can be used if you need to do some preprocessing to prepare the lockfiles for scanning. If the file names in the artifact are not standard lockfile names, make sure to add custom scan-args to specify the lockfile type and path (see [specify lockfiles](./usage#specify-lockfiles)). +- `download-artifact`: Optional artifact to download for scanning. Can be used if you need to do some preprocessing to prepare the lockfiles for scanning. If the file names in the artifact are not standard lockfile names, make sure to add custom scan-args to specify the lockfile type and path (see [specify lockfiles](./usage.md#specify-lockfiles)). - `upload-sarif`: Whether to upload the results to Security > Code Scanning. Defaults to `true`. - `fail-on-vuln`: Whether to fail the workflow when a vulnerability is found. Defaults to `true`. 
@@ -186,7 +186,7 @@ Examples ```yml jobs: scan-pr: - uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@v1.9.0" + uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@v1.9.1" with: scan-args: |- --lockfile=./path/to/lockfile1 @@ -198,7 +198,7 @@ jobs: ```yml jobs: scan-pr: - uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@v1.9.0" + uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@v1.9.1" with: scan-args: |- --recursive @@ -225,7 +225,7 @@ jobs: name: Vulnerability scanning # makes sure the extraction step is completed before running the scanner needs: extract-deps - uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@v1.9.0" + uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@v1.9.1" with: # Download the artifact uploaded in extract-deps step download-artifact: converted-OSV-Scanner-deps diff --git a/docs/guided-remediation.md b/docs/guided-remediation.md index 858c791ee6..495fa1d560 100644 --- a/docs/guided-remediation.md +++ b/docs/guided-remediation.md @@ -249,7 +249,7 @@ The following flag may be used to limit the patches allowed for your dependencie ### Data source -By default, we use the [deps.dev API](https://docs.deps.dev/api/v3alpha/) to find version and dependency information of packages during remediation. +By default, we use the [deps.dev API](https://docs.deps.dev/api/) to find version and dependency information of packages during remediation. If instead you'd like to use your ecosystem's native registry API (e.g. `https://registry.npmjs.org`), you can use the `--data-source=native` flag. `osv-scanner fix` will attempt to use the authentication specified by the native tooling (e.g. `npm config`) @@ -264,7 +264,7 @@ If your project uses mirrored or private registries, you will need to use `--dat ### Offline Vulnerability Database -The `fix` subcommand supports the `--experimental-offline` and `--experimental-download-offline-databases` flags. +The `fix` subcommand supports the `--experimental-offline-vulnerabilities` and `--experimental-download-offline-databases` flags. For more information, see [Offline Mode](./offline-mode.md). diff --git a/docs/offline-mode.md b/docs/offline-mode.md index 46efa171ec..d345d846d4 100644 --- a/docs/offline-mode.md +++ b/docs/offline-mode.md @@ -59,6 +59,8 @@ The offline database flag `--experimental-offline` causes OSV-Scanner to scan yo osv-scanner --experimental-offline ./path/to/your/dir ``` +To use offline mode for just the vulnerability database, but allow other features to possibly make network requests (e.g. [transitive dependency scanning](./supported_languages_and_lockfiles.md/#transitive-dependency-scanning)), you can use the `--experimental-offline-vulnerabilities` flag instead. + ## Download offline databases option The download offline databases flag `--experimental-download-offline-databases` allows OSV-Scanner to download or update your local database when running in offline mode, to make it easier to get started. This option only works when you also set the offline flag. 
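To make the offline flag relationships documented above concrete, here is a minimal usage sketch based only on the flags this change documents (the scan path is a placeholder taken from the docs):

```bash
# Fully offline: disables any feature requiring network access,
# including transitive resolution and license scanning.
osv-scanner --experimental-offline ./path/to/your/dir

# Offline vulnerability database only: other features, such as Maven
# transitive dependency resolution, may still make network requests.
osv-scanner --experimental-offline-vulnerabilities ./path/to/your/dir

# Allow the scanner to download/update the local databases while running offline.
osv-scanner --experimental-offline --experimental-download-offline-databases ./path/to/your/dir
```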
diff --git a/docs/supported_languages_and_lockfiles.md b/docs/supported_languages_and_lockfiles.md index 0abd7c0c41..ff13f0bde6 100644 --- a/docs/supported_languages_and_lockfiles.md +++ b/docs/supported_languages_and_lockfiles.md @@ -72,7 +72,7 @@ Vendored dependencies have been directly copied into the project folder, but do ## Transitive dependency scanning -OSV-Scanner supports transitive dependency scanning for Maven pom.xml. This feature is enabled by default when scanning, but it is disabled in the [offline mode](./offline-mode.md). +OSV-Scanner supports transitive dependency scanning for Maven pom.xml. This feature is enabled by default when scanning, but it can be disabled using the `--experimental-no-resolve` flag. It is also disabled in the [offline mode](./offline-mode.md). OSV-Scanner uses [deps.dev’s resolver library](https://pkg.go.dev/deps.dev/util/resolve) to compute the dependency graph of a project. This graph includes all of the direct and transitive dependencies. By default, [deps.dev API](https://docs.deps.dev/api/v3/index.html) is queried for package versions and requirements. The support for private registries is [coming soon](https://github.com/google/osv-scanner/issues/1045). @@ -81,6 +81,14 @@ After the dependency resolution, the OSV database is queried for the vulnerabili {: .note } Test dependencies are not supported yet in the computed dependency graph for Maven pom.xml. +### Data source + +By default, we use the [deps.dev API](https://docs.deps.dev/api/v3/) to find version and dependency information of packages during transitive scanning. + +If instead you'd like to fetch data from [Maven Central](https://repo.maven.apache.org/maven2/), you can use the `--experimental-resolution-data-source=native` flag. + +If your project uses mirrored or private registries, in addition to setting `--experimental-resolution-data-source=native`, you will need to use the `--experimental-maven-registry=` flag to specify the registry (e.g. `--experimental-maven-registry=https://repo.maven.apache.org/maven2/`). + ## Custom Lockfiles If you have a custom lockfile that we do not support or prefer to do your own custom parsing, you can extract the custom lockfile information and create a custom intermediate file containing dependency information so that osv-scanner can still check for vulnerabilities. 
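Similarly, a hedged sketch of the Maven transitive-scanning options described above (the pom.xml path is a placeholder; the registry URL is the example given in the docs):

```bash
# Skip transitive dependency resolution of manifest files entirely (new flag).
osv-scanner --experimental-no-resolve ./path/to/pom.xml

# Resolve transitive dependencies via Maven Central instead of the deps.dev API.
osv-scanner --experimental-resolution-data-source=native ./path/to/pom.xml

# Point resolution at a mirrored or private registry.
osv-scanner --experimental-resolution-data-source=native \
  --experimental-maven-registry=https://repo.maven.apache.org/maven2/ \
  ./path/to/pom.xml
```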
diff --git a/go.mod b/go.mod index caf5f92b7b..190c3fc96e 100644 --- a/go.mod +++ b/go.mod @@ -13,12 +13,12 @@ require ( github.com/charmbracelet/bubbletea v1.1.1 github.com/charmbracelet/glamour v0.8.0 github.com/charmbracelet/lipgloss v0.13.0 - github.com/dghubble/trie v0.1.0 github.com/gkampitakis/go-snaps v0.5.7 github.com/go-git/go-billy/v5 v5.5.0 github.com/go-git/go-git/v5 v5.12.0 github.com/google/go-cmp v0.6.0 github.com/google/go-containerregistry v0.20.2 + github.com/google/osv-scalibr v0.1.4-0.20241031120023-761ca671aacb github.com/ianlancetaylor/demangle v0.0.0-20240912202439-0a2b6291aafd github.com/jedib0t/go-pretty/v6 v6.6.0 github.com/muesli/reflow v0.3.0 @@ -44,7 +44,7 @@ require ( require ( dario.cat/mergo v1.0.0 // indirect - github.com/Microsoft/go-winio v0.6.1 // indirect + github.com/Microsoft/go-winio v0.6.2 // indirect github.com/ProtonMail/go-crypto v1.0.0 // indirect github.com/alecthomas/chroma/v2 v2.14.0 // indirect github.com/anchore/go-struct-converter v0.0.0-20230627203149-c72ef8859ca9 // indirect @@ -58,8 +58,6 @@ require ( github.com/cpuguy83/go-md2man/v2 v2.0.5 // indirect github.com/cyphar/filepath-securejoin v0.2.4 // indirect github.com/dlclark/regexp2 v1.11.0 // indirect - github.com/docker/distribution v2.8.3+incompatible // indirect - github.com/docker/docker-credential-helpers v0.8.1 // indirect github.com/emirpasic/gods v1.18.1 // indirect github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect github.com/gkampitakis/ciinfo v0.3.0 // indirect @@ -82,14 +80,13 @@ require ( github.com/muesli/cancelreader v0.2.2 // indirect github.com/muesli/termenv v0.15.3-0.20240618155329-98d742f6907a // indirect github.com/opencontainers/go-digest v1.0.0 // indirect - github.com/opencontainers/image-spec v1.1.0-rc3 // indirect + github.com/opencontainers/image-spec v1.1.0 // indirect github.com/pjbgf/sha1cd v0.3.0 // indirect github.com/rivo/uniseg v0.4.7 // indirect github.com/rogpeppe/go-internal v1.12.0 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect github.com/sahilm/fuzzy v0.1.1 // indirect github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect - github.com/sirupsen/logrus v1.9.3 // indirect github.com/skeema/knownhosts v1.2.2 // indirect github.com/spdx/gordf v0.0.0-20221230105357-b735bd5aac89 // indirect github.com/tidwall/match v1.1.1 // indirect diff --git a/go.sum b/go.sum index 6c92190e56..36bfaa2ada 100644 --- a/go.sum +++ b/go.sum @@ -13,8 +13,8 @@ github.com/BurntSushi/toml v1.4.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2 github.com/CycloneDX/cyclonedx-go v0.9.1 h1:yffaWOZsv77oTJa/SdVZYdgAgFioCeycBUKkqS2qzQM= github.com/CycloneDX/cyclonedx-go v0.9.1/go.mod h1:NE/EWvzELOFlG6+ljX/QeMlVt9VKcTwu8u0ccsACEsw= github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY= -github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow= -github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= +github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= +github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= github.com/ProtonMail/go-crypto v1.0.0 h1:LRuvITjQWX+WIfr930YHG2HNfjR1uOfyf5vE0kC2U78= github.com/ProtonMail/go-crypto v1.0.0/go.mod h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0= github.com/alecthomas/assert/v2 v2.7.0 h1:QtqSACNS3tF7oasA8CU6A6sXZSBDqnm7RfpLl9bZqbE= @@ -69,8 +69,6 @@ github.com/cyphar/filepath-securejoin v0.2.4/go.mod 
h1:aPGpWjXOXUn2NCNjFvBE6aRxG github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/dghubble/trie v0.1.0 h1:kJnjBLFFElBwS60N4tkPvnLhnpcDxbBjIulgI8CpNGM= -github.com/dghubble/trie v0.1.0/go.mod h1:sOmnzfBNH7H92ow2292dDFWNsVQuh/izuD7otCYb1ak= github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI= github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= github.com/docker/cli v27.1.1+incompatible h1:goaZxOqs4QKxznZjjBWKONQci/MywhtRv2oNn0GkeZE= @@ -111,6 +109,8 @@ github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-containerregistry v0.20.2 h1:B1wPJ1SN/S7pB+ZAimcciVD+r+yV/l/DSArMxlbwseo= github.com/google/go-containerregistry v0.20.2/go.mod h1:z38EKdKh4h7IP2gSfUUqEvalZBqs6AoLeWfUy34nQC8= +github.com/google/osv-scalibr v0.1.4-0.20241031120023-761ca671aacb h1:A7IvUJk8r3wMuuAMWxwbkE3WBp+oF/v7CcEt3nCy+lI= +github.com/google/osv-scalibr v0.1.4-0.20241031120023-761ca671aacb/go.mod h1:MbEYB+PKqEGjwMdpcoO5DWpi0+57jYgYcw2jlRy8O9Q= github.com/gorilla/css v1.0.1 h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8= github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A3b0= github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM= @@ -161,8 +161,8 @@ github.com/onsi/gomega v1.27.10 h1:naR28SdDFlqrG6kScpT8VWpu1xWY5nJRCF3XaYyBjhI= github.com/onsi/gomega v1.27.10/go.mod h1:RsS8tutOdbdgzbPtzzATp12yT7kM5I5aElG3evPbQ0M= github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= -github.com/opencontainers/image-spec v1.1.0-rc3 h1:fzg1mXZFj8YdPeNkRXMg+zb88BFV0Ys52cJydRwBkb8= -github.com/opencontainers/image-spec v1.1.0-rc3/go.mod h1:X4pATf0uXsnn3g5aiGIsVnJBR4mxhKzfwmvK/B2NTm8= +github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug= +github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM= github.com/owenrumney/go-sarif v1.1.1/go.mod h1:dNDiPlF04ESR/6fHlPyq7gHKmrM0sHUvAGjsoh8ZH0U= github.com/owenrumney/go-sarif/v2 v2.3.3 h1:ubWDJcF5i3L/EIOER+ZyQ03IfplbSU1BLOE26uKQIIU= github.com/owenrumney/go-sarif/v2 v2.3.3/go.mod h1:MSqMMx9WqlBSY7pXoOZWgEsVB4FDNfhcaXDA1j6Sr+w= diff --git a/internal/config/config.go b/internal/config/config.go index d1b8b86b0c..afa7abbf13 100644 --- a/internal/config/config.go +++ b/internal/config/config.go @@ -16,11 +16,7 @@ import ( const osvScannerConfigName = "osv-scanner.toml" -// Ignore stuttering as that would be a breaking change -// TODO: V2 rename? 
-// -//nolint:revive -type ConfigManager struct { +type Manager struct { // Override to replace all other configs OverrideConfig *Config // Config to use if no config file is found alongside manifests @@ -112,17 +108,6 @@ func (c *Config) ShouldIgnorePackage(pkg models.PackageVulns) (bool, PackageOver }) } -// Deprecated: Use ShouldIgnorePackage instead -func (c *Config) ShouldIgnorePackageVersion(name, version, ecosystem string) (bool, PackageOverrideEntry) { - return c.ShouldIgnorePackage(models.PackageVulns{ - Package: models.PackageInfo{ - Name: name, - Version: version, - Ecosystem: ecosystem, - }, - }) -} - // ShouldIgnorePackageVulnerabilities determines if the given package should have its vulnerabilities ignored based on override entries in the config func (c *Config) ShouldIgnorePackageVulnerabilities(pkg models.PackageVulns) bool { overrides, _ := c.filterPackageVersionEntries(pkg, func(e PackageOverrideEntry) bool { @@ -139,17 +124,6 @@ func (c *Config) ShouldOverridePackageLicense(pkg models.PackageVulns) (bool, Pa }) } -// Deprecated: Use ShouldOverridePackageLicense instead -func (c *Config) ShouldOverridePackageVersionLicense(name, version, ecosystem string) (bool, PackageOverrideEntry) { - return c.ShouldOverridePackageLicense(models.PackageVulns{ - Package: models.PackageInfo{ - Name: name, - Version: version, - Ecosystem: ecosystem, - }, - }) -} - func shouldIgnoreTimestamp(ignoreUntil time.Time) bool { if ignoreUntil.IsZero() { // If IgnoreUntil is not set, should ignore. @@ -162,7 +136,7 @@ func shouldIgnoreTimestamp(ignoreUntil time.Time) bool { // Sets the override config by reading the config file at configPath. // Will return an error if loading the config file fails -func (c *ConfigManager) UseOverride(configPath string) error { +func (c *Manager) UseOverride(configPath string) error { config, configErr := tryLoadConfig(configPath) if configErr != nil { return configErr @@ -173,7 +147,7 @@ func (c *ConfigManager) UseOverride(configPath string) error { } // Attempts to get the config -func (c *ConfigManager) Get(r reporter.Reporter, targetPath string) Config { +func (c *Manager) Get(r reporter.Reporter, targetPath string) Config { if c.OverrideConfig != nil { return *c.OverrideConfig } diff --git a/internal/config/config_internal_test.go b/internal/config/config_internal_test.go index 2336c2ae23..fd2c8cd4a4 100644 --- a/internal/config/config_internal_test.go +++ b/internal/config/config_internal_test.go @@ -787,124 +787,6 @@ func TestConfig_ShouldIgnorePackage(t *testing.T) { } } -func TestConfig_ShouldIgnorePackageVersion(t *testing.T) { - t.Parallel() - - type args struct { - name string - version string - ecosystem string - } - tests := []struct { - name string - config Config - args args - wantOk bool - wantEntry PackageOverrideEntry - }{ - { - name: "Version-level entry exists", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: args{ - name: "lib1", - version: "1.0.0", - ecosystem: "Go", - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - { - name: "Package-level entry exists", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Ecosystem: "Go", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: args{ - 
name: "lib1", - version: "1.0.0", - ecosystem: "Go", - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Name: "lib1", - Ecosystem: "Go", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - { - name: "Entry doesn't exist", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Version: "2.0.0", - Ecosystem: "Go", - Ignore: false, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - { - Name: "lib2", - Version: "2.0.0", - Ignore: true, - Ecosystem: "Go", - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: args{ - name: "lib1", - version: "2.0.0", - ecosystem: "Go", - }, - wantOk: false, - wantEntry: PackageOverrideEntry{}, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - gotOk, gotEntry := tt.config.ShouldIgnorePackageVersion(tt.args.name, tt.args.version, tt.args.ecosystem) - if gotOk != tt.wantOk { - t.Errorf("ShouldIgnorePackageVersion() gotOk = %v, wantOk %v", gotOk, tt.wantOk) - } - if !reflect.DeepEqual(gotEntry, tt.wantEntry) { - t.Errorf("ShouldIgnorePackageVersion() gotEntry = %v, wantEntry %v", gotEntry, tt.wantEntry) - } - }) - } -} - func TestConfig_ShouldIgnorePackageVulnerabilities(t *testing.T) { t.Parallel() @@ -1203,118 +1085,3 @@ func TestConfig_ShouldOverridePackageLicense(t *testing.T) { }) } } - -func TestConfig_ShouldOverridePackageVersionLicense(t *testing.T) { - t.Parallel() - - type args struct { - name string - version string - ecosystem string - } - tests := []struct { - name string - config Config - args args - wantOk bool - wantEntry PackageOverrideEntry - }{ - { - name: "Exact version entry exists", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - License: License{ - Override: []string{"mit"}, - }, - Reason: "abc", - }, - }, - }, - args: args{ - name: "lib1", - version: "1.0.0", - ecosystem: "Go", - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - License: License{ - Override: []string{"mit"}, - }, - Reason: "abc", - }, - }, - { - name: "Version entry doesn't exist", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - License: License{ - Override: []string{"mit"}, - }, - Reason: "abc", - }, - }, - }, - args: args{ - name: "lib1", - version: "1.0.1", - ecosystem: "Go", - }, - wantOk: false, - wantEntry: PackageOverrideEntry{}, - }, - { - name: "Name matches", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Ecosystem: "Go", - License: License{ - Override: []string{"mit"}, - }, - Reason: "abc", - }, - }, - }, - args: args{ - name: "lib1", - version: "1.0.1", - ecosystem: "Go", - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Name: "lib1", - Ecosystem: "Go", - License: License{ - Override: []string{"mit"}, - }, - Reason: "abc", - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - gotOk, gotEntry := tt.config.ShouldOverridePackageVersionLicense(tt.args.name, tt.args.version, tt.args.ecosystem) - if gotOk != tt.wantOk { - t.Errorf("ShouldOverridePackageVersionLicense() gotOk = %v, wantOk %v", gotOk, tt.wantOk) - } - if !reflect.DeepEqual(gotEntry, tt.wantEntry) { - t.Errorf("ShouldOverridePackageVersionLicense() gotEntry = %v, wantEntry %v", gotEntry, tt.wantEntry) - } - }) - } -} diff --git a/internal/image/__snapshots__/image_test.snap 
b/internal/image/__snapshots__/image_test.snap index 9d957ad396..58b8b54897 100755 --- a/internal/image/__snapshots__/image_test.snap +++ b/internal/image/__snapshots__/image_test.snap @@ -4,7 +4,7 @@ "Lockfiles": [ { "filePath": "/lib/apk/db/installed", - "parsedAs": "apk-installed", + "parsedAs": "os/apk", "packages": [ { "name": "alpine-baselayout", @@ -186,7 +186,7 @@ "Lockfiles": [ { "filePath": "/go/bin/more-vuln-overwrite-less-vuln", - "parsedAs": "go-binary", + "parsedAs": "go/binary", "packages": [ { "name": "github.com/BurntSushi/toml", @@ -214,7 +214,7 @@ }, { "filePath": "/go/bin/ptf-1.2.0", - "parsedAs": "go-binary", + "parsedAs": "go/binary", "packages": [ { "name": "github.com/BurntSushi/toml", @@ -242,7 +242,7 @@ }, { "filePath": "/go/bin/ptf-1.3.0", - "parsedAs": "go-binary", + "parsedAs": "go/binary", "packages": [ { "name": "github.com/BurntSushi/toml", @@ -270,7 +270,7 @@ }, { "filePath": "/go/bin/ptf-1.3.0-moved", - "parsedAs": "go-binary", + "parsedAs": "go/binary", "packages": [ { "name": "github.com/BurntSushi/toml", @@ -298,7 +298,7 @@ }, { "filePath": "/go/bin/ptf-1.4.0", - "parsedAs": "go-binary", + "parsedAs": "go/binary", "packages": [ { "name": "github.com/BurntSushi/toml", @@ -326,7 +326,7 @@ }, { "filePath": "/go/bin/ptf-vulnerable", - "parsedAs": "go-binary", + "parsedAs": "go/binary", "packages": [ { "name": "github.com/BurntSushi/toml", @@ -354,7 +354,7 @@ }, { "filePath": "/lib/apk/db/installed", - "parsedAs": "apk-installed", + "parsedAs": "os/apk", "packages": [ { "name": "alpine-baselayout", @@ -536,7 +536,7 @@ "Lockfiles": [ { "filePath": "/lib/apk/db/installed", - "parsedAs": "apk-installed", + "parsedAs": "os/apk", "packages": [ { "name": "alpine-baselayout", @@ -754,7 +754,7 @@ "Lockfiles": [ { "filePath": "/lib/apk/db/installed", - "parsedAs": "apk-installed", + "parsedAs": "os/apk", "packages": [ { "name": "alpine-baselayout", @@ -963,8 +963,8 @@ ] }, { - "filePath": "/usr/app/node_modules/.package-lock.json", - "parsedAs": "node_modules", + "filePath": "/prod/app/node_modules/.package-lock.json", + "parsedAs": "javascript/nodemodules", "packages": [ { "name": "cryo", @@ -1011,7 +1011,7 @@ "Lockfiles": [ { "filePath": "/lib/apk/db/installed", - "parsedAs": "apk-installed", + "parsedAs": "os/apk", "packages": [ { "name": "alpine-baselayout", @@ -1229,7 +1229,7 @@ "Lockfiles": [ { "filePath": "/lib/apk/db/installed", - "parsedAs": "apk-installed", + "parsedAs": "os/apk", "packages": [ { "name": "alpine-baselayout", @@ -1447,7 +1447,7 @@ "Lockfiles": [ { "filePath": "/lib/apk/db/installed", - "parsedAs": "apk-installed", + "parsedAs": "os/apk", "packages": [ { "name": "alpine-baselayout", @@ -1665,7 +1665,7 @@ "Lockfiles": [ { "filePath": "/lib/apk/db/installed", - "parsedAs": "apk-installed", + "parsedAs": "os/apk", "packages": [ { "name": "alpine-baselayout", diff --git a/internal/image/extractor.go b/internal/image/extractor.go index 6ddb7f9f16..18dad0ed63 100644 --- a/internal/image/extractor.go +++ b/internal/image/extractor.go @@ -1,57 +1,79 @@ package image import ( + "context" "errors" "fmt" - "os" - "path" - "sort" - + "io/fs" + "strings" + + "github.com/google/osv-scalibr/extractor" + "github.com/google/osv-scalibr/extractor/filesystem" + "github.com/google/osv-scalibr/extractor/filesystem/language/golang/gobinary" + "github.com/google/osv-scalibr/extractor/filesystem/os/apk" + "github.com/google/osv-scalibr/extractor/filesystem/os/dpkg" + "github.com/google/osv-scanner/internal/lockfilescalibr" + 
"github.com/google/osv-scanner/internal/lockfilescalibr/language/javascript/nodemodules" "github.com/google/osv-scanner/pkg/lockfile" ) // artifactExtractors contains only extractors for artifacts that are important in // the final layer of a container image -var artifactExtractors map[string]lockfile.Extractor = map[string]lockfile.Extractor{ - "node_modules": lockfile.NodeModulesExtractor{}, - "apk-installed": lockfile.ApkInstalledExtractor{}, - "dpkg": lockfile.DpkgStatusExtractor{}, - "go-binary": lockfile.GoBinaryExtractor{}, -} - -type extractorPair struct { - extractor lockfile.Extractor - name string +var artifactExtractors []filesystem.Extractor = []filesystem.Extractor{ + // TODO: Using nodemodules extractor to minimize changes of snapshots + // After annotations are added, we should switch to using packagejson. + // packagejson.New(packagejson.DefaultConfig()), + nodemodules.Extractor{}, + + apk.New(apk.DefaultConfig()), + gobinary.New(gobinary.DefaultConfig()), + // TODO: Add tests for debian containers + dpkg.New(dpkg.DefaultConfig()), } -func findArtifactExtractor(path string) []extractorPair { +func findArtifactExtractor(path string, fileInfo fs.FileInfo) []filesystem.Extractor { // Use ShouldExtract to collect and return a slice of artifactExtractors - var extractors []extractorPair - for name, extractor := range artifactExtractors { - if extractor.ShouldExtract(path) { - extractors = append(extractors, extractorPair{extractor, name}) + var extractors []filesystem.Extractor + for _, extractor := range artifactExtractors { + if extractor.FileRequired(path, fileInfo) { + extractors = append(extractors, extractor) } } return extractors } -func extractArtifactDeps(path string, layer *Layer) (lockfile.Lockfile, error) { - foundExtractors := findArtifactExtractor(path) +// Note: Output is non deterministic +func extractArtifactDeps(extractPath string, layer *Layer) ([]*extractor.Inventory, error) { + pathFileInfo, err := layer.Stat(extractPath) + if err != nil { + return nil, fmt.Errorf("attempted to get FileInfo but failed: %w", err) + } + + scalibrPath := strings.TrimPrefix(extractPath, "/") + foundExtractors := findArtifactExtractor(scalibrPath, pathFileInfo) if len(foundExtractors) == 0 { - return lockfile.Lockfile{}, fmt.Errorf("%w for %s", lockfile.ErrExtractorNotFound, path) + return nil, fmt.Errorf("%w for %s", lockfilescalibr.ErrExtractorNotFound, extractPath) } - packages := []lockfile.PackageDetails{} + inventories := []*extractor.Inventory{} var extractedAs string - for _, extPair := range foundExtractors { + for _, extractor := range foundExtractors { // File has to be reopened per extractor as each extractor moves the read cursor - f, err := OpenLayerFile(path, layer) + f, err := layer.Open(extractPath) if err != nil { - return lockfile.Lockfile{}, fmt.Errorf("attempted to open file but failed: %w", err) + return nil, fmt.Errorf("attempted to open file but failed: %w", err) + } + + scanInput := &filesystem.ScanInput{ + FS: layer, + Path: scalibrPath, + Root: "/", + Reader: f, + Info: pathFileInfo, } - newPackages, err := extPair.extractor.Extract(f) + newPackages, err := extractor.Extract(context.Background(), scanInput) f.Close() if err != nil { @@ -59,76 +81,33 @@ func extractArtifactDeps(path string, layer *Layer) (lockfile.Lockfile, error) { continue } - return lockfile.Lockfile{}, fmt.Errorf("(extracting as %s) %w", extPair.name, err) + return nil, fmt.Errorf("(extracting as %s) %w", extractor.Name(), err) } - extractedAs = extPair.name - packages = newPackages 
- // TODO(rexpan): Determine if it's acceptable to have multiple extractors + for i := range newPackages { + newPackages[i].Extractor = extractor + } + + extractedAs = extractor.Name() + inventories = newPackages + // TODO(rexpan): Determine if this it's acceptable to have multiple extractors // extract from the same file successfully break } if extractedAs == "" { - return lockfile.Lockfile{}, fmt.Errorf("%w for %s", lockfile.ErrExtractorNotFound, path) + return nil, fmt.Errorf("%w for %s", lockfilescalibr.ErrExtractorNotFound, extractPath) } - // Sort to have deterministic output, and to match behavior of lockfile.extractDeps - sort.Slice(packages, func(i, j int) bool { - if packages[i].Name == packages[j].Name { - return packages[i].Version < packages[j].Version + // Perform any one-off translations here + for _, inv := range inventories { + // Scalibr uses go to indicate go compiler version + // We specifically cares about the stdlib version inside the package + // so convert the package name from go to stdlib + if inv.Ecosystem() == "Go" && inv.Name == "go" { + inv.Name = "stdlib" } - - return packages[i].Name < packages[j].Name - }) - - return lockfile.Lockfile{ - FilePath: path, - ParsedAs: extractedAs, - Packages: packages, - }, nil -} - -// A File represents a file that exists in an image -type File struct { - *os.File - - layer *Layer - path string -} - -func (f File) Open(openPath string) (lockfile.NestedDepFile, error) { - // use path instead of filepath, because container is always in Unix paths (for now) - if path.IsAbs(openPath) { - return OpenLayerFile(openPath, f.layer) - } - - absPath := path.Join(f.path, openPath) - - return OpenLayerFile(absPath, f.layer) -} - -func (f File) Path() string { - return f.path -} - -func OpenLayerFile(path string, layer *Layer) (File, error) { - fileNode, err := layer.getFileNode(path) - if err != nil { - return File{}, err } - file, err := fileNode.Open() - if err != nil { - return File{}, err - } - - return File{ - File: file, - path: path, - layer: layer, - }, nil + return inventories, nil } - -var _ lockfile.DepFile = File{} -var _ lockfile.NestedDepFile = File{} diff --git a/internal/image/fixtures/alpine-3.19-alpine-release b/internal/image/fixtures/alpine-3.18-alpine-release similarity index 100% rename from internal/image/fixtures/alpine-3.19-alpine-release rename to internal/image/fixtures/alpine-3.18-alpine-release diff --git a/internal/image/fixtures/alpine-3.18-os-release b/internal/image/fixtures/alpine-3.18-os-release new file mode 100644 index 0000000000..ffb92a8cd4 --- /dev/null +++ b/internal/image/fixtures/alpine-3.18-os-release @@ -0,0 +1,7 @@ +/ # cat /etc/os-release +NAME="Alpine Linux" +ID=alpine +VERSION_ID=3.18.1 +PRETTY_NAME="Alpine Linux v3.18" +HOME_URL="https://alpinelinux.org/" +BUG_REPORT_URL="https://gitlab.alpinelinux.org/alpine/aports/-/issues" diff --git a/internal/image/fixtures/test-alpine.Dockerfile b/internal/image/fixtures/test-alpine.Dockerfile index 5cf22e2812..d6aa79f1c8 100644 --- a/internal/image/fixtures/test-alpine.Dockerfile +++ b/internal/image/fixtures/test-alpine.Dockerfile @@ -1,4 +1,5 @@ FROM alpine:3.10@sha256:451eee8bedcb2f029756dc3e9d73bab0e7943c1ac55cff3a4861c52a0fdd3e98 -# Switch the version to 3.19 to show the advisories published for the latest alpine versions -COPY "alpine-3.19-alpine-release" "/etc/alpine-release" +# Switch the version to 3.18 to show the advisories published for the latest alpine versions +COPY "alpine-3.18-alpine-release" "/etc/alpine-release" +COPY 
"alpine-3.18-os-release" "/etc/os-release" diff --git a/internal/image/fixtures/test-node_modules-npm-empty.Dockerfile b/internal/image/fixtures/test-node_modules-npm-empty.Dockerfile index aa559ba285..67ff3b79f7 100644 --- a/internal/image/fixtures/test-node_modules-npm-empty.Dockerfile +++ b/internal/image/fixtures/test-node_modules-npm-empty.Dockerfile @@ -2,7 +2,7 @@ ARG MANAGER_VERSION="10.2.4" FROM node:20-alpine@sha256:c0a3badbd8a0a760de903e00cedbca94588e609299820557e72cba2a53dbaa2c -WORKDIR /usr/app +WORKDIR /prod/app # install the desired package manager RUN npm i -g "npm@$MANAGER_VERSION" diff --git a/internal/image/fixtures/test-node_modules-npm-full.Dockerfile b/internal/image/fixtures/test-node_modules-npm-full.Dockerfile index df412b7a12..96e136b5f7 100644 --- a/internal/image/fixtures/test-node_modules-npm-full.Dockerfile +++ b/internal/image/fixtures/test-node_modules-npm-full.Dockerfile @@ -2,7 +2,7 @@ ARG MANAGER_VERSION="10.2.4" FROM node:20-alpine@sha256:c0a3badbd8a0a760de903e00cedbca94588e609299820557e72cba2a53dbaa2c -WORKDIR /usr/app +WORKDIR /prod/app # install the desired package manager RUN npm i -g "npm@$MANAGER_VERSION" diff --git a/internal/image/fixtures/test-node_modules-pnpm-empty.Dockerfile b/internal/image/fixtures/test-node_modules-pnpm-empty.Dockerfile index 8912eef5d0..7a221ca7ea 100644 --- a/internal/image/fixtures/test-node_modules-pnpm-empty.Dockerfile +++ b/internal/image/fixtures/test-node_modules-pnpm-empty.Dockerfile @@ -2,7 +2,7 @@ ARG MANAGER_VERSION="8.15.4" FROM node:20-alpine@sha256:c0a3badbd8a0a760de903e00cedbca94588e609299820557e72cba2a53dbaa2c -WORKDIR /usr/app +WORKDIR /prod/app # install the desired package manager RUN npm i -g "pnpm@$MANAGER_VERSION" diff --git a/internal/image/fixtures/test-node_modules-pnpm-full.Dockerfile b/internal/image/fixtures/test-node_modules-pnpm-full.Dockerfile index 97a37c652a..80e1ee6519 100644 --- a/internal/image/fixtures/test-node_modules-pnpm-full.Dockerfile +++ b/internal/image/fixtures/test-node_modules-pnpm-full.Dockerfile @@ -2,7 +2,7 @@ ARG MANAGER_VERSION="8.15.4" FROM node:20-alpine@sha256:c0a3badbd8a0a760de903e00cedbca94588e609299820557e72cba2a53dbaa2c -WORKDIR /usr/app +WORKDIR /prod/app # install the desired package manager RUN npm i -g "pnpm@$MANAGER_VERSION" diff --git a/internal/image/fixtures/test-node_modules-yarn-empty.Dockerfile b/internal/image/fixtures/test-node_modules-yarn-empty.Dockerfile index 7158d5d258..41f4c2f423 100644 --- a/internal/image/fixtures/test-node_modules-yarn-empty.Dockerfile +++ b/internal/image/fixtures/test-node_modules-yarn-empty.Dockerfile @@ -2,7 +2,7 @@ ARG MANAGER_VERSION="1.22.22" FROM node:20-alpine@sha256:c0a3badbd8a0a760de903e00cedbca94588e609299820557e72cba2a53dbaa2c -WORKDIR /usr/app +WORKDIR /prod/app # install the desired package manager RUN npm i -g "yarn@$MANAGER_VERSION" --force diff --git a/internal/image/fixtures/test-node_modules-yarn-full.Dockerfile b/internal/image/fixtures/test-node_modules-yarn-full.Dockerfile index 54889d6804..99e9653f01 100644 --- a/internal/image/fixtures/test-node_modules-yarn-full.Dockerfile +++ b/internal/image/fixtures/test-node_modules-yarn-full.Dockerfile @@ -2,7 +2,7 @@ ARG MANAGER_VERSION="1.22.22" FROM node:20-alpine@sha256:c0a3badbd8a0a760de903e00cedbca94588e609299820557e72cba2a53dbaa2c -WORKDIR /usr/app +WORKDIR /prod/app # install the desired package manager RUN npm i -g "yarn@$MANAGER_VERSION" --force diff --git a/internal/image/image.go b/internal/image/image.go index be3bd3171e..212845ebfd 100644 --- 
a/internal/image/image.go +++ b/internal/image/image.go @@ -11,9 +11,9 @@ import ( "path/filepath" "strings" - "github.com/dghubble/trie" v1 "github.com/google/go-containerregistry/pkg/v1" "github.com/google/go-containerregistry/pkg/v1/tarball" + "github.com/google/osv-scanner/internal/image/pathtree" "github.com/google/osv-scanner/pkg/lockfile" ) @@ -112,7 +112,7 @@ func LoadImage(imagePath string) (*Image, error) { } outputImage.layers[i] = Layer{ - fileNodeTrie: trie.NewPathTrie(), + fileNodeTrie: pathtree.NewNode[FileNode](), id: hash.Hex, rootImage: &outputImage, } @@ -180,6 +180,7 @@ func LoadImage(imagePath string) (*Image, error) { // filepath.Clean first to convert to OS specific file path // TODO: Escape invalid characters on windows that's valid on linux absoluteDiskPath := filepath.Join(dirPath, filepath.Clean(cleanedFilePath)) + symlinkTarget := "" var fileType fileType // write out the file/dir to disk @@ -191,8 +192,7 @@ func LoadImage(imagePath string) (*Image, error) { } } fileType = Dir - - default: // Assume if it's not a directory, it's a normal file + case tar.TypeReg: // Write all files as read/writable by the current user, inaccessible by anyone else // Actual permission bits are stored in FileNode f, err := os.OpenFile(absoluteDiskPath, os.O_CREATE|os.O_RDWR, filePermission) @@ -210,6 +210,11 @@ func LoadImage(imagePath string) (*Image, error) { } fileType = RegularFile f.Close() + case tar.TypeSymlink: + fileType = Symlink + symlinkTarget = header.Linkname + default: // Assume if it's not a directory or normal file + // TODO: Handle these cases } // Each outer loop, we add a layer to each relevant output flattenedLayers slice @@ -235,15 +240,21 @@ func LoadImage(imagePath string) (*Image, error) { continue } - currentMap.fileNodeTrie.Put(virtualPath, FileNode{ + err := currentMap.fileNodeTrie.Insert(virtualPath, &FileNode{ rootImage: &outputImage, // Select the original layer of the file - originLayer: &outputImage.layers[i], - virtualPath: virtualPath, - fileType: fileType, - isWhiteout: tombstone, - permission: fs.FileMode(header.Mode), //nolint:gosec + originLayer: &outputImage.layers[i], + virtualPath: virtualPath, + fileType: fileType, + linkTargetPath: symlinkTarget, + isWhiteout: tombstone, + // TODO: Fix file mode bits to contain the high bits + permission: fs.FileMode(header.Mode), //nolint:gosec }) + + if err != nil { + return &outputImage, fmt.Errorf("image tar has repeated files: %w", err) + } } } @@ -260,13 +271,12 @@ func inWhiteoutDir(fileMap Layer, filePath string) bool { if filePath == "" { break } - dirname := filepath.Dir(filePath) + dirname := path.Dir(filePath) if filePath == dirname { break } - val := fileMap.fileNodeTrie.Get(dirname) - item, ok := val.(FileNode) - if ok && item.isWhiteout { + node := fileMap.fileNodeTrie.Get(dirname) + if node != nil && node.isWhiteout { return true } filePath = dirname diff --git a/internal/image/image_test.go b/internal/image/image_test.go index 90bd028524..bc4397ab4e 100644 --- a/internal/image/image_test.go +++ b/internal/image/image_test.go @@ -3,7 +3,6 @@ package image_test import ( "errors" "os" - "sort" "testing" "github.com/google/osv-scanner/internal/image" @@ -94,10 +93,6 @@ func TestScanImage(t *testing.T) { } } - sort.Slice(got.Lockfiles, func(i, j int) bool { - return got.Lockfiles[i].FilePath < got.Lockfiles[j].FilePath - }) - tt.want.MatchJSON(t, got) }) } diff --git a/internal/image/layer.go b/internal/image/layer.go index 9e100dc03f..c25a089f93 100644 --- a/internal/image/layer.go +++ 
b/internal/image/layer.go @@ -3,9 +3,14 @@ package image import ( "io/fs" "os" + "strings" + "time" + + // Note that paths accessing the disk must use filepath, but all virtual paths should use path + "path" "path/filepath" - "github.com/dghubble/trie" + "github.com/google/osv-scanner/internal/image/pathtree" ) type fileType int @@ -13,19 +18,92 @@ type fileType int const ( RegularFile fileType = iota Dir + Symlink ) // FileNode represents a file on a specific layer, mapping the contents to an extracted file on disk type FileNode struct { // TODO: Determine the performance implications of having a pointer to base image in every fileNode - rootImage *Image - fileType fileType - isWhiteout bool - originLayer *Layer - virtualPath string - permission fs.FileMode + rootImage *Image + // TODO: Filetype is redundant if permission is set correctly + fileType fileType + isWhiteout bool + originLayer *Layer + virtualPath string + linkTargetPath string + permission fs.FileMode +} + +var _ fs.DirEntry = FileNode{} + +func (f FileNode) IsDir() bool { + return f.fileType == Dir +} + +func (f FileNode) Name() string { + return path.Base(f.virtualPath) +} + +func (f FileNode) Type() fs.FileMode { + return f.permission +} + +func (f FileNode) Info() (fs.FileInfo, error) { + return f.Stat() +} + +type FileNodeFileInfo struct { + baseFileInfo fs.FileInfo + fileNode *FileNode +} + +var _ fs.FileInfo = FileNodeFileInfo{} + +func (f FileNodeFileInfo) Name() string { + return path.Base(f.fileNode.virtualPath) +} + +func (f FileNodeFileInfo) Size() int64 { + return f.baseFileInfo.Size() } +func (f FileNodeFileInfo) Mode() fs.FileMode { + return f.fileNode.permission +} + +func (f FileNodeFileInfo) ModTime() time.Time { + return f.baseFileInfo.ModTime() +} + +func (f FileNodeFileInfo) IsDir() bool { + return f.fileNode.fileType == Dir +} + +func (f FileNodeFileInfo) Sys() any { + return nil +} + +// Stat returns the FileInfo structure describing file. 
+func (f *FileNode) Stat() (fs.FileInfo, error) { + // TODO: Implement this properly + if f.fileType == Symlink { + return FileNodeFileInfo{ + fileNode: f, + }, nil + } + + baseFileInfo, err := os.Stat(f.absoluteDiskPath()) + if err != nil { + return nil, err + } + + return FileNodeFileInfo{ + baseFileInfo: baseFileInfo, + fileNode: f, + }, nil +} + +// Open returns a file handle for the file func (f *FileNode) Open() (*os.File, error) { if f.isWhiteout { return nil, fs.ErrNotExist @@ -42,35 +120,82 @@ func (f *FileNode) absoluteDiskPath() string { type Layer struct { // id is the sha256 digest of the layer id string - fileNodeTrie *trie.PathTrie + fileNodeTrie *pathtree.Node[FileNode] rootImage *Image // TODO: Use hashmap to speed up path lookups } -func (filemap Layer) getFileNode(path string) (FileNode, error) { - node, ok := filemap.fileNodeTrie.Get(path).(FileNode) - if !ok { - return FileNode{}, fs.ErrNotExist +func (filemap Layer) Open(path string) (fs.File, error) { + node, err := filemap.getFileNode(path) + if err != nil { + return nil, err + } + + if node.fileType == Symlink { + return filemap.Open(node.linkTargetPath) + } + + return node.Open() +} + +func (filemap Layer) Stat(path string) (fs.FileInfo, error) { + node, err := filemap.getFileNode(path) + if err != nil { + return nil, err + } + + if node.fileType == Symlink { + return filemap.Stat(node.linkTargetPath) + } + + return node.Stat() +} + +func (filemap Layer) ReadDir(path string) ([]fs.DirEntry, error) { + children := filemap.fileNodeTrie.GetChildren(path) + output := make([]fs.DirEntry, 0, len(children)) + for _, node := range children { + output = append(output, node) + } + + return output, nil +} + +var _ fs.FS = Layer{} +var _ fs.StatFS = Layer{} +var _ fs.ReadDirFS = Layer{} + +func (filemap Layer) getFileNode(nodePath string) (*FileNode, error) { + // We expect all paths queried to be absolute paths rooted at the container root + // However, scalibr uses paths without a prepending /, because the paths are relative to Root. + // Root will always be '/' for container scanning, so prepend with / if necessary. + if !strings.HasPrefix(nodePath, "/") { + nodePath = path.Join("/", nodePath) + } + + node := filemap.fileNodeTrie.Get(nodePath) + if node == nil { + return nil, fs.ErrNotExist } return node, nil } // AllFiles return all files that exist on the layer the FileMap is representing -func (filemap Layer) AllFiles() []FileNode { - allFiles := []FileNode{} +func (filemap Layer) AllFiles() []*FileNode { + allFiles := []*FileNode{} // No need to check error since we are not returning any errors - _ = filemap.fileNodeTrie.Walk(func(_ string, value interface{}) error { - node := value.(FileNode) + _ = filemap.fileNodeTrie.Walk(func(_ string, node *FileNode) error { if node.fileType != RegularFile { // Only add regular files return nil } + // TODO: Check if parent is an opaque whiteout if node.isWhiteout { // Don't add whiteout files as they have been deleted return nil } - allFiles = append(allFiles, value.(FileNode)) + allFiles = append(allFiles, node) return nil }) diff --git a/internal/image/pathtree/pathtree.go b/internal/image/pathtree/pathtree.go new file mode 100644 index 0000000000..d14666a5a1 --- /dev/null +++ b/internal/image/pathtree/pathtree.go @@ -0,0 +1,133 @@ +// Package pathtree provides a tree structure for representing file paths. +// Each path segment is a node in the tree, enabling efficient storage +// and retrieval for building virtual file systems. 
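As a quick orientation to the new package introduced below, here is a minimal usage sketch of the Insert/Get/Walk API it defines. The paths and values are made up, and pathtree is an internal package of osv-scanner, so this is illustrative only and not part of the change itself:

    package main

    import (
        "fmt"

        "github.com/google/osv-scanner/internal/image/pathtree"
    )

    func main() {
        root := pathtree.NewNode[string]()

        etc, rel := "etc dir", "os-release file"
        // Insert requires absolute paths and returns ErrNodeAlreadyExists on duplicates.
        _ = root.Insert("/etc", &etc)
        _ = root.Insert("/etc/os-release", &rel)

        // Get returns nil if no value was inserted at the path.
        if v := root.Get("/etc/os-release"); v != nil {
            fmt.Println(*v)
        }

        // Walk visits every node depth first; parents that were never inserted
        // explicitly carry a nil value, so guard before dereferencing.
        _ = root.Walk(func(segment string, v *string) error {
            if v != nil {
                fmt.Println(segment, *v)
            }
            return nil
        })
    }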
+package pathtree + +import ( + "errors" + "fmt" + "strings" +) + +const divider string = "/" + +var ErrNodeAlreadyExists = errors.New("node already exists") + +// Root node represents the root directory / +type Node[V any] struct { + value *V + children map[string]*Node[V] +} + +func NewNode[V any]() *Node[V] { + return &Node[V]{ + children: make(map[string]*Node[V]), + } +} + +// Insert inserts a value into the tree at the given path. +// If a node already exists at the given path, an error is returned. +// +// If a file is inserted without also inserting the parent directory +// the parent directory entry will have a nil value. +func (node *Node[V]) Insert(path string, value *V) error { + path, err := cleanPath(path) + if err != nil { + return fmt.Errorf("Insert() error: %w", err) + } + + cursor := node + for _, segment := range strings.Split(path, divider) { + next, ok := cursor.children[segment] + // Create the segment if it doesn't exist + if !ok { + next = &Node[V]{ + value: nil, + children: make(map[string]*Node[V]), + } + cursor.children[segment] = next + } + cursor = next + } + + if cursor.value != nil { + return fmt.Errorf("%w: %v", ErrNodeAlreadyExists, divider+path) + } + + cursor.value = value + + return nil +} + +// Get retrieves the value at the given path. +// If no node exists at the given path, nil is returned. +func (node *Node[V]) Get(path string) *V { + path, _ = cleanPath(path) + + cursor := node + for _, segment := range strings.Split(path, divider) { + next, ok := cursor.children[segment] + if !ok { + return nil + } + cursor = next + } + + return cursor.value +} + +// Get retrieves all the direct children of this given path +func (node *Node[V]) GetChildren(path string) []*V { + path, _ = cleanPath(path) + + cursor := node + for _, segment := range strings.Split(path, divider) { + next, ok := cursor.children[segment] + if !ok { + return nil + } + cursor = next + } + + var children = make([]*V, 0, len(cursor.children)) + for _, child := range cursor.children { + // Some entries could be nil if a file is inserted without inserting the + // parent directories. 
+ if child != nil { + children = append(children, child.value) + } + } + + return children +} + +// cleanPath returns a path for use in the tree +// additionally an error is returned if path is not formatted as expected +func cleanPath(inputPath string) (string, error) { + path, found := strings.CutPrefix(inputPath, divider) + if !found { + return "", fmt.Errorf("path %q is not an absolute path", inputPath) + } + path = strings.TrimSuffix(path, "/") + + return path, nil +} + +// Walk walks through all elements of this tree depths first, calling fn at every node +func (node *Node[V]) Walk(fn func(string, *V) error) error { + return node.walk("/", fn) +} + +func (node *Node[V]) walk(path string, fn func(string, *V) error) error { + for key, node := range node.children { + if err := fn(key, node.value); err != nil { + return err + } + err := node.walk(path+divider+key, fn) + if err != nil { + return err + } + } + + return nil +} diff --git a/internal/image/pathtree/pathtree_test.go b/internal/image/pathtree/pathtree_test.go new file mode 100644 index 0000000000..556c97545a --- /dev/null +++ b/internal/image/pathtree/pathtree_test.go @@ -0,0 +1,264 @@ +package pathtree_test + +import ( + "strings" + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/google/go-cmp/cmp/cmpopts" + "github.com/google/osv-scanner/internal/image/pathtree" +) + +type testVal struct { + string +} + +func assertNoError(t *testing.T, err error) { + t.Helper() + + if err != nil { + t.Errorf("%v", err) + } +} + +func testTree(t *testing.T) *pathtree.Node[testVal] { + t.Helper() + + tree := pathtree.NewNode[testVal]() + assertNoError(t, tree.Insert("/a", &testVal{"value1"})) + assertNoError(t, tree.Insert("/a/b", &testVal{"value2"})) + assertNoError(t, tree.Insert("/a/b/c", &testVal{"value3"})) + assertNoError(t, tree.Insert("/a/b/d", &testVal{"value4"})) + assertNoError(t, tree.Insert("/a/e", &testVal{"value5"})) + assertNoError(t, tree.Insert("/a/e/f", &testVal{"value6"})) + assertNoError(t, tree.Insert("/a/b/d/f", &testVal{"value7"})) + assertNoError(t, tree.Insert("/a/g", &testVal{"value8"})) + + return tree +} + +func TestNode_Insert_Error(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + tree *pathtree.Node[testVal] + key string + val *testVal + }{ + { + name: "duplicate node", + tree: func() *pathtree.Node[testVal] { + tree := pathtree.NewNode[testVal]() + _ = tree.Insert("/a", &testVal{"value1"}) + + return tree + }(), + key: "/a", + val: &testVal{"value2"}, + }, + { + name: "duplicate node in subtree", + tree: func() *pathtree.Node[testVal] { + tree := pathtree.NewNode[testVal]() + _ = tree.Insert("/a", &testVal{"value1"}) + _ = tree.Insert("/a/b", &testVal{"value2"}) + + return tree + }(), + key: "/a/b", + val: &testVal{"value3"}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + err := tt.tree.Insert(tt.key, tt.val) + if err == nil { + t.Errorf("Node.Insert() expected error, got nil") + } + }) + } +} + +func TestNode_Get(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + tree *pathtree.Node[testVal] + key string + want *testVal + }{ + { + name: "empty tree", + tree: pathtree.NewNode[testVal](), + key: "/a", + want: nil, + }, + { + name: "single node", + tree: func() *pathtree.Node[testVal] { + tree := pathtree.NewNode[testVal]() + _ = tree.Insert("/a", &testVal{"value"}) + + return tree + }(), + key: "/a", + want: &testVal{"value"}, + }, + { + name: "non-existent node in single node tree", + tree: func() 
*pathtree.Node[testVal] { + tree := pathtree.NewNode[testVal]() + _ = tree.Insert("/a", &testVal{"value"}) + + return tree + }(), + key: "/b", + want: nil, + }, + { + name: "multiple nodes", + tree: testTree(t), + key: "/a/b/c", + want: &testVal{"value3"}, + }, + { + name: "non-existent node", + tree: testTree(t), + key: "/a/b/g", + want: nil, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + got := tt.tree.Get(tt.key) + if diff := cmp.Diff(tt.want, got, cmp.AllowUnexported(testVal{})); diff != "" { + t.Errorf("Node.Get() (-want +got): %v", diff) + } + }) + } +} + +func TestNode_GetChildren(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + tree *pathtree.Node[testVal] + key string + want []*testVal + }{ + { + name: "empty tree", + tree: pathtree.NewNode[testVal](), + key: "/a", + want: nil, + }, + { + name: "single node no children", + tree: func() *pathtree.Node[testVal] { + tree := pathtree.NewNode[testVal]() + _ = tree.Insert("/a", &testVal{"value"}) + + return tree + }(), + key: "/a", + want: []*testVal{}, + }, + { + name: "multiple nodes with children", + tree: testTree(t), + key: "/a/b", + want: []*testVal{ + {"value3"}, + {"value4"}, + }, + }, + { + name: "non-existent node", + tree: testTree(t), + key: "/a/b/g", + want: nil, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + got := tt.tree.GetChildren(tt.key) + if diff := cmp.Diff( + tt.want, + got, + cmp.AllowUnexported(testVal{}), + cmpopts.SortSlices(func(a, b *testVal) bool { + return strings.Compare(a.string, b.string) < 0 + })); diff != "" { + t.Errorf("Node.GetChildren() (-want +got): %v", diff) + } + }) + } +} + +func TestNode_Walk(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + tree *pathtree.Node[testVal] + want []string + }{ + { + name: "empty tree", + tree: pathtree.NewNode[testVal](), + want: []string{}, + }, + { + name: "single node", + tree: func() *pathtree.Node[testVal] { + tree := pathtree.NewNode[testVal]() + _ = tree.Insert("/a", &testVal{"value"}) + + return tree + }(), + want: []string{"value"}, + }, + { + name: "multiple nodes", + tree: testTree(t), + want: []string{ + "value1", + "value2", + "value3", + "value4", + "value5", + "value6", + "value7", + "value8", + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + got := []string{} + err := tt.tree.Walk(func(_ string, node *testVal) error { + got = append(got, node.string) + return nil + }) + if err != nil { + t.Errorf("Node.Walk() error = %v", err) + } + if diff := cmp.Diff(tt.want, got, cmpopts.SortSlices(func(a, b string) bool { + return strings.Compare(a, b) < 0 + })); diff != "" { + t.Errorf("Node.Walk() (-want +got): %v", diff) + } + }) + } +} diff --git a/internal/image/scan.go b/internal/image/scan.go index 9bfc8ae02d..ccbd398b57 100644 --- a/internal/image/scan.go +++ b/internal/image/scan.go @@ -1,14 +1,21 @@ package image import ( + "cmp" "errors" "fmt" "io/fs" "log" + "path" + "slices" + "strings" + "github.com/google/osv-scalibr/extractor" + "github.com/google/osv-scanner/internal/lockfilescalibr" "github.com/google/osv-scanner/pkg/lockfile" "github.com/google/osv-scanner/pkg/models" "github.com/google/osv-scanner/pkg/reporter" + "golang.org/x/exp/maps" ) // ScanImage scans an exported docker image .tar file @@ -22,33 +29,105 @@ func ScanImage(r reporter.Reporter, imagePath string) (ScanResults, error) { allFiles := img.LastLayer().AllFiles() - scannedLockfiles := 
ScanResults{ + scanResults := ScanResults{ ImagePath: imagePath, } + + inventories := []*extractor.Inventory{} + for _, file := range allFiles { if file.fileType != RegularFile { continue } - parsedLockfile, err := extractArtifactDeps(file.virtualPath, img.LastLayer()) + + // TODO: Currently osv-scalibr does not correctly annotate OS packages + // causing artifact extractors to double extract elements here. + // So let's skip all these directories for now. + // See (b/364536788) + // + // https://en.wikipedia.org/wiki/Filesystem_Hierarchy_Standard + // > Secondary hierarchy for read-only user data; contains the majority of (multi-)user utilities and applications. + // > Should be shareable and read-only. + // + if strings.HasPrefix(file.virtualPath, "/usr/") { + continue + } + + extractedInventories, err := extractArtifactDeps(file.virtualPath, img.LastLayer()) if err != nil { - if !errors.Is(err, lockfile.ErrExtractorNotFound) { + if !errors.Is(err, lockfilescalibr.ErrExtractorNotFound) { r.Errorf("Attempted to extract lockfile but failed: %s - %v\n", file.virtualPath, err) } continue } + inventories = append(inventories, extractedInventories...) + } + + // TODO: Remove the lockfile.Lockfile conversion + // Temporarily convert back to lockfile.Lockfiles to minimize snapshot changes + // This is done to verify the scanning behavior have not changed with this refactor + // and to minimize changes in the initial PR. + lockfiles := map[string]lockfile.Lockfile{} + for _, i := range inventories { + if len(i.Annotations) > 1 { + log.Printf("%v", i.Annotations) + } + lf, exists := lockfiles[path.Join("/", i.Locations[0])] + if !exists { + lf = lockfile.Lockfile{ + FilePath: path.Join("/", i.Locations[0]), + ParsedAs: i.Extractor.Name(), + } + } + + pkg := lockfile.PackageDetails{ + Name: i.Name, + Version: i.Version, + Ecosystem: lockfile.Ecosystem(i.Ecosystem()), + CompareAs: lockfile.Ecosystem(strings.Split(i.Ecosystem(), ":")[0]), + } + if i.SourceCode != nil { + pkg.Commit = i.SourceCode.Commit + } - scannedLockfiles.Lockfiles = append(scannedLockfiles.Lockfiles, parsedLockfile) + lf.Packages = append(lf.Packages, pkg) + + lockfiles[path.Join("/", i.Locations[0])] = lf + } + + for _, l := range lockfiles { + slices.SortFunc(l.Packages, func(a, b lockfile.PackageDetails) int { + return cmp.Or( + strings.Compare(a.Name, b.Name), + strings.Compare(a.Version, b.Version), + ) + }) } - traceOrigin(img, &scannedLockfiles) + scanResults.Lockfiles = maps.Values(lockfiles) + slices.SortFunc(scanResults.Lockfiles, func(a, b lockfile.Lockfile) int { + return strings.Compare(a.FilePath, b.FilePath) + }) + + traceOrigin(img, &scanResults) + + // TODO: Reenable this sort when removing lockfile.Lockfile + // Sort to have deterministic output, and to match behavior of lockfile.extractDeps + // slices.SortFunc(scanResults.Inventories, func(a, b *extractor.Inventory) int { + // // TODO: Should we consider errors here? 
+ // aPURL, _ := a.Extractor.ToPURL(a) + // bPURL, _ := b.Extractor.ToPURL(b) + + // return strings.Compare(aPURL.ToString(), bPURL.ToString()) + // }) err = img.Cleanup() if err != nil { err = fmt.Errorf("failed to cleanup: %w", img.Cleanup()) } - return scannedLockfiles, err + return scanResults, err } // traceOrigin fills out the originLayerID for each package in ScanResults @@ -60,15 +139,30 @@ func traceOrigin(img *Image, scannedLockfiles *ScanResults) { Name string Version string Commit string - Ecosystem lockfile.Ecosystem + Ecosystem string } + // TODO: Remove this function after fully migrating to extractor.Inventory makePDKey := func(pd lockfile.PackageDetails) PDKey { return PDKey{ Name: pd.Name, Version: pd.Version, Commit: pd.Commit, - Ecosystem: pd.Ecosystem, + Ecosystem: string(pd.Ecosystem), + } + } + + makePDKey2 := func(pd *extractor.Inventory) PDKey { + var commit string + if pd.SourceCode != nil { + commit = pd.SourceCode.Commit + } + + return PDKey{ + Name: pd.Name, + Version: pd.Version, + Commit: commit, + Ecosystem: pd.Ecosystem(), } } @@ -120,12 +214,11 @@ func traceOrigin(img *Image, scannedLockfiles *ScanResults) { // Failed to parse an older version of file in image // Behave as if the file does not exist break - // log.Panicf("unimplemented! failed to parse an older version of file in image: %s@%s: %v", file.FilePath, oldFileNode.originLayer.id, err) } // For each package in the old version, check if it existed in the newer layer, if so, the origin must be this layer or earlier. - for _, pkg := range oldDeps.Packages { - key := makePDKey(pkg) + for _, pkg := range oldDeps { + key := makePDKey2(pkg) if val, ok := sourceLayerIdx[key]; ok && val == prevLayerIdx { sourceLayerIdx[key] = layerIdx } diff --git a/internal/lockfilescalibr/errors.go b/internal/lockfilescalibr/errors.go new file mode 100644 index 0000000000..005ee0012b --- /dev/null +++ b/internal/lockfilescalibr/errors.go @@ -0,0 +1,9 @@ +package lockfilescalibr + +import "errors" + +var ErrIncompatibleFileFormat = errors.New("file format is incompatible, but this is expected") +var ErrNotImplemented = errors.New("not implemented") +var ErrWrongExtractor = errors.New("this extractor did not create this inventory") +var ErrExtractorNotFound = errors.New("could not determine extractor") +var ErrNoExtractorsFound = errors.New("no extractors found to be suitable to this file") diff --git a/internal/lockfilescalibr/language/java/pomxmlnet/extractor.go b/internal/lockfilescalibr/language/java/pomxmlnet/extractor.go new file mode 100644 index 0000000000..3a1a5f51c0 --- /dev/null +++ b/internal/lockfilescalibr/language/java/pomxmlnet/extractor.go @@ -0,0 +1,188 @@ +// Package pomxmlnet extracts Maven's pom.xml format with transitive dependency resolution. +package pomxmlnet + +import ( + "context" + "fmt" + "io/fs" + "path/filepath" + + "golang.org/x/exp/maps" + + mavenresolve "deps.dev/util/resolve/maven" + mavenutil "github.com/google/osv-scanner/internal/utility/maven" + + "deps.dev/util/maven" + "deps.dev/util/resolve" + "deps.dev/util/resolve/dep" + "github.com/google/osv-scalibr/extractor" + "github.com/google/osv-scalibr/extractor/filesystem" + "github.com/google/osv-scalibr/extractor/filesystem/osv" + "github.com/google/osv-scalibr/plugin" + "github.com/google/osv-scalibr/purl" + "github.com/google/osv-scanner/internal/resolution/client" + "github.com/google/osv-scanner/internal/resolution/datasource" +) + +// Extractor extracts osv packages from osv-scanner json output. 
+type Extractor struct { + client.DependencyClient + *datasource.MavenRegistryAPIClient +} + +// Name of the extractor. +func (e Extractor) Name() string { return "osv/pomxmlnet" } + +// Version of the extractor. +func (e Extractor) Version() int { return 0 } + +// Requirements of the extractor. +func (e Extractor) Requirements() *plugin.Capabilities { + return &plugin.Capabilities{ + Network: true, + } +} + +// FileRequired never returns true, as this is for the osv-scanner json output. +func (e Extractor) FileRequired(path string, _ fs.FileInfo) bool { + return filepath.Base(path) == "pom.xml" +} + +// Extract extracts packages from yarn.lock files passed through the scan input. +func (e Extractor) Extract(ctx context.Context, input *filesystem.ScanInput) ([]*extractor.Inventory, error) { + var project maven.Project + if err := datasource.NewMavenDecoder(input.Reader).Decode(&project); err != nil { + return nil, fmt.Errorf("could not extract from %s: %w", input.Path, err) + } + // Empty JDK and ActivationOS indicates merging the default profiles. + if err := project.MergeProfiles("", maven.ActivationOS{}); err != nil { + return nil, fmt.Errorf("failed to merge profiles: %w", err) + } + for _, repo := range project.Repositories { + if err := e.MavenRegistryAPIClient.AddRegistry(string(repo.URL)); err != nil { + return nil, fmt.Errorf("failed to add registry %s: %w", repo.URL, err) + } + } + // Merging parents data by parsing local parent pom.xml or fetching from upstream. + if err := mavenutil.MergeParents(ctx, e.MavenRegistryAPIClient, &project, project.Parent, 1, input.Path, true); err != nil { + return nil, fmt.Errorf("failed to merge parents: %w", err) + } + // Process the dependencies: + // - dedupe dependencies and dependency management + // - import dependency management + // - fill in missing dependency version requirement + project.ProcessDependencies(func(groupID, artifactID, version maven.String) (maven.DependencyManagement, error) { + return mavenutil.GetDependencyManagement(ctx, e.MavenRegistryAPIClient, groupID, artifactID, version) + }) + + if registries := e.MavenRegistryAPIClient.GetRegistries(); len(registries) > 0 { + clientRegs := make([]client.Registry, len(registries)) + for i, reg := range registries { + clientRegs[i] = client.Registry{URL: reg} + } + if err := e.DependencyClient.AddRegistries(clientRegs); err != nil { + return nil, err + } + } + + overrideClient := client.NewOverrideClient(e.DependencyClient) + resolver := mavenresolve.NewResolver(overrideClient) + + // Resolve the dependencies. 
+ root := resolve.Version{ + VersionKey: resolve.VersionKey{ + PackageKey: resolve.PackageKey{ + System: resolve.Maven, + Name: project.ProjectKey.Name(), + }, + VersionType: resolve.Concrete, + Version: string(project.Version), + }} + reqs := make([]resolve.RequirementVersion, len(project.Dependencies)+len(project.DependencyManagement.Dependencies)) + for i, d := range project.Dependencies { + reqs[i] = resolve.RequirementVersion{ + VersionKey: resolve.VersionKey{ + PackageKey: resolve.PackageKey{ + System: resolve.Maven, + Name: d.Name(), + }, + VersionType: resolve.Requirement, + Version: string(d.Version), + }, + Type: resolve.MavenDepType(d, ""), + } + } + for i, d := range project.DependencyManagement.Dependencies { + reqs[len(project.Dependencies)+i] = resolve.RequirementVersion{ + VersionKey: resolve.VersionKey{ + PackageKey: resolve.PackageKey{ + System: resolve.Maven, + Name: d.Name(), + }, + VersionType: resolve.Requirement, + Version: string(d.Version), + }, + Type: resolve.MavenDepType(d, mavenutil.OriginManagement), + } + } + overrideClient.AddVersion(root, reqs) + + client.PreFetch(ctx, overrideClient, reqs, input.Path) + g, err := resolver.Resolve(ctx, root.VersionKey) + if err != nil { + return nil, fmt.Errorf("failed resolving %v: %w", root, err) + } + for i, e := range g.Edges { + e.Type = dep.Type{} + g.Edges[i] = e + } + + details := map[string]*extractor.Inventory{} + for i := 1; i < len(g.Nodes); i++ { + // Ignore the first node which is the root. + node := g.Nodes[i] + depGroups := []string{} + inventory := extractor.Inventory{ + Name: node.Version.Name, + Version: node.Version.Version, + // TODO(rexpan): Add merged paths in here as well + Locations: []string{input.Path}, + } + // We are only able to know dependency groups of direct dependencies but + // not transitive dependencies because the nodes in the resolve graph does + // not have the scope information. + for _, dep := range project.Dependencies { + if dep.Name() != inventory.Name { + continue + } + if dep.Scope != "" && dep.Scope != "compile" { + depGroups = append(depGroups, string(dep.Scope)) + } + } + inventory.Metadata = osv.DepGroupMetadata{ + DepGroupVals: depGroups, + } + details[inventory.Name] = &inventory + } + + return maps.Values(details), nil +} + +// ToPURL converts an inventory created by this extractor into a PURL. +func (e Extractor) ToPURL(i *extractor.Inventory) *purl.PackageURL { + return &purl.PackageURL{ + Type: purl.TypeMaven, + Name: i.Name, + Version: i.Version, + } +} + +// ToCPEs is not applicable as this extractor does not infer CPEs from the Inventory. +func (e Extractor) ToCPEs(_ *extractor.Inventory) []string { return []string{} } + +// Ecosystem returns the OSV ecosystem ('npm') of the software extracted by this extractor. 
+func (e Extractor) Ecosystem(_ *extractor.Inventory) string { + return "Maven" +} + +var _ filesystem.Extractor = Extractor{} diff --git a/internal/lockfilescalibr/language/java/pomxmlnet/extractor_test.go b/internal/lockfilescalibr/language/java/pomxmlnet/extractor_test.go new file mode 100644 index 0000000000..556663be75 --- /dev/null +++ b/internal/lockfilescalibr/language/java/pomxmlnet/extractor_test.go @@ -0,0 +1,366 @@ +package pomxmlnet_test + +import ( + "context" + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/google/go-cmp/cmp/cmpopts" + "github.com/google/osv-scalibr/extractor" + "github.com/google/osv-scalibr/extractor/filesystem/osv" + "github.com/google/osv-scalibr/testing/extracttest" + "github.com/google/osv-scanner/internal/lockfilescalibr/language/java/pomxmlnet" + "github.com/google/osv-scanner/internal/resolution/clienttest" + "github.com/google/osv-scanner/internal/resolution/datasource" + "github.com/google/osv-scanner/internal/testutility" +) + +func TestMavenResolverExtractor_FileRequired(t *testing.T) { + t.Parallel() + + tests := []struct { + path string + want bool + }{ + { + path: "", + want: false, + }, + { + path: "pom.xml", + want: true, + }, + { + path: "path/to/my/pom.xml", + want: true, + }, + { + path: "path/to/my/pom.xml/file", + want: false, + }, + { + path: "path/to/my/pom.xml.file", + want: false, + }, + { + path: "path.to.my.pom.xml", + want: false, + }, + } + for _, tt := range tests { + t.Run(tt.path, func(t *testing.T) { + t.Parallel() + e := pomxmlnet.Extractor{} + got := e.FileRequired(tt.path, nil) + if got != tt.want { + t.Errorf("Extract() got = %v, want %v", got, tt.want) + } + }) + } +} + +func TestExtractor_Extract(t *testing.T) { + t.Parallel() + + tests := []extracttest.TestTableEntry{ + { + Name: "Not a pom file", + InputConfig: extracttest.ScanInputMockConfig{ + Path: "testdata/maven/not-pom.txt", + }, + WantErr: extracttest.ContainsErrStr{Str: "could not extract from"}, + }, + { + Name: "invalid xml syntax", + InputConfig: extracttest.ScanInputMockConfig{ + Path: "testdata/maven/invalid-syntax.xml", + }, + WantErr: extracttest.ContainsErrStr{Str: "XML syntax error"}, + }, + { + Name: "empty", + InputConfig: extracttest.ScanInputMockConfig{ + Path: "testdata/maven/empty.xml", + }, + WantInventory: []*extractor.Inventory{}, + }, + { + Name: "one package", + InputConfig: extracttest.ScanInputMockConfig{ + Path: "testdata/maven/one-package.xml", + }, + WantInventory: []*extractor.Inventory{ + { + Name: "org.apache.maven:maven-artifact", + Version: "1.0.0", + Locations: []string{"testdata/maven/one-package.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + }, + }, + { + Name: "two packages", + InputConfig: extracttest.ScanInputMockConfig{ + Path: "testdata/maven/two-packages.xml", + }, + WantInventory: []*extractor.Inventory{ + { + Name: "io.netty:netty-all", + Version: "4.1.42.Final", + Locations: []string{"testdata/maven/two-packages.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + { + Name: "org.slf4j:slf4j-log4j12", + Version: "1.7.25", + Locations: []string{"testdata/maven/two-packages.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + }, + }, + { + Name: "with dependency management", + InputConfig: extracttest.ScanInputMockConfig{ + Path: "testdata/maven/with-dependency-management.xml", + }, + WantInventory: []*extractor.Inventory{ + { + Name: "io.netty:netty-all", + Version: "4.1.9", + Locations: 
[]string{"testdata/maven/with-dependency-management.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + { + Name: "org.slf4j:slf4j-log4j12", + Version: "1.7.25", + Locations: []string{"testdata/maven/with-dependency-management.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + }, + }, + { + Name: "interpolation", + InputConfig: extracttest.ScanInputMockConfig{ + Path: "testdata/maven/interpolation.xml", + }, + WantInventory: []*extractor.Inventory{ + { + Name: "org.mine:mypackage", + Version: "1.0.0", + Locations: []string{"testdata/maven/interpolation.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + { + Name: "org.mine:my.package", + Version: "2.3.4", + Locations: []string{"testdata/maven/interpolation.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + { + Name: "org.mine:ranged-package", + Version: "9.4.37", + Locations: []string{"testdata/maven/interpolation.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + }, + }, + { + Name: "with scope / dep groups", + InputConfig: extracttest.ScanInputMockConfig{ + Path: "testdata/maven/with-scope.xml", + }, + WantInventory: []*extractor.Inventory{ + { + Name: "junit:junit", + Version: "4.12", + Locations: []string{"testdata/maven/with-scope.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{"runtime"}}, + }, + }, + }, + { + Name: "transitive dependencies", + InputConfig: extracttest.ScanInputMockConfig{ + Path: "testdata/maven/transitive.xml", + }, + WantInventory: []*extractor.Inventory{ + { + Name: "org.direct:alice", + Version: "1.0.0", + Locations: []string{"testdata/maven/transitive.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + { + Name: "org.direct:bob", + Version: "2.0.0", + Locations: []string{"testdata/maven/transitive.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + { + Name: "org.direct:chris", + Version: "3.0.0", + Locations: []string{"testdata/maven/transitive.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + { + Name: "org.transitive:chuck", + Version: "1.1.1", + Locations: []string{"testdata/maven/transitive.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + { + Name: "org.transitive:dave", + Version: "2.2.2", + Locations: []string{"testdata/maven/transitive.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + { + Name: "org.transitive:eve", + Version: "3.3.3", + Locations: []string{"testdata/maven/transitive.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + { + Name: "org.transitive:frank", + Version: "4.4.4", + Locations: []string{"testdata/maven/transitive.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + + resolutionClient := clienttest.NewMockResolutionClient(t, "testdata/universe/basic-universe.yaml") + extr := pomxmlnet.Extractor{ + DependencyClient: resolutionClient, + MavenRegistryAPIClient: &datasource.MavenRegistryAPIClient{}, + } + + scanInput := extracttest.GenerateScanInputMock(t, tt.InputConfig) + defer extracttest.CloseTestScanInput(t, scanInput) + + got, err := extr.Extract(context.Background(), &scanInput) + + if diff := cmp.Diff(tt.WantErr, err, cmpopts.EquateErrors()); diff != "" { + t.Errorf("%s.Extract(%q) error diff (-want +got):\n%s", extr.Name(), tt.InputConfig.Path, diff) + return + } + + if diff := 
cmp.Diff(tt.WantInventory, got, cmpopts.SortSlices(extracttest.InventoryCmpLess)); diff != "" { + t.Errorf("%s.Extract(%q) diff (-want +got):\n%s", extr.Name(), tt.InputConfig.Path, diff) + } + }) + } +} + +func TestExtractor_Extract_WithMockServer(t *testing.T) { + t.Parallel() + + tt := extracttest.TestTableEntry{ + // Name: "with parent", + InputConfig: extracttest.ScanInputMockConfig{ + Path: "testdata/maven/with-parent.xml", + }, + WantInventory: []*extractor.Inventory{ + { + Name: "org.alice:alice", + Version: "1.0.0", + Locations: []string{"testdata/maven/with-parent.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + { + Name: "org.bob:bob", + Version: "2.0.0", + Locations: []string{"testdata/maven/with-parent.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + { + Name: "org.chuck:chuck", + Version: "3.0.0", + Locations: []string{"testdata/maven/with-parent.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + { + Name: "org.dave:dave", + Version: "4.0.0", + Locations: []string{"testdata/maven/with-parent.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + { + Name: "org.eve:eve", + Version: "5.0.0", + Locations: []string{"testdata/maven/with-parent.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + { + Name: "org.frank:frank", + Version: "6.0.0", + Locations: []string{"testdata/maven/with-parent.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + }, + } + + srv := testutility.NewMockHTTPServer(t) + srv.SetResponse(t, "org/upstream/parent-pom/1.0/parent-pom-1.0.pom", []byte(` + + org.upstream + parent-pom + 1.0 + pom + + + org.eve + eve + 5.0.0 + + + + `)) + srv.SetResponse(t, "org/import/import/1.2.3/import-1.2.3.pom", []byte(` + + org.import + import + 1.2.3 + pom + + + + org.frank + frank + 6.0.0 + + + + + `)) + + apiClient, err := datasource.NewMavenRegistryAPIClient(srv.URL) + if err != nil { + t.Fatalf("%v", err) + } + + resolutionClient := clienttest.NewMockResolutionClient(t, "testdata/universe/basic-universe.yaml") + extr := pomxmlnet.Extractor{ + DependencyClient: resolutionClient, + MavenRegistryAPIClient: apiClient, + } + + scanInput := extracttest.GenerateScanInputMock(t, tt.InputConfig) + defer extracttest.CloseTestScanInput(t, scanInput) + + got, err := extr.Extract(context.Background(), &scanInput) + + if diff := cmp.Diff(tt.WantErr, err, cmpopts.EquateErrors()); diff != "" { + t.Errorf("%s.Extract(%q) error diff (-want +got):\n%s", extr.Name(), tt.InputConfig.Path, diff) + return + } + + if diff := cmp.Diff(tt.WantInventory, got, cmpopts.SortSlices(extracttest.InventoryCmpLess)); diff != "" { + t.Errorf("%s.Extract(%q) diff (-want +got):\n%s", extr.Name(), tt.InputConfig.Path, diff) + } +} diff --git a/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/empty.xml b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/empty.xml new file mode 100644 index 0000000000..8cfeebaaa4 --- /dev/null +++ b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/empty.xml @@ -0,0 +1,7 @@ + + 4.0.0 + + com.mycompany.app + my-app + 1 + diff --git a/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/interpolation.xml b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/interpolation.xml new file mode 100644 index 0000000000..6b7f761afc --- /dev/null +++ b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/interpolation.xml @@ -0,0 +1,37 @@ + + + 4.0.0 + + io.library + my-library + 
1.0-SNAPSHOT + jar + + + 1.0.0 + 2.3.4 + [9.4.35.v20201120,9.5) + + + + + org.mine + mypackage + ${mypackageVersion} + + + + org.mine + my.package + ${my.package.version} + + + + org.mine + ranged-package + ${version-range} + + + + diff --git a/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/invalid-syntax.xml b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/invalid-syntax.xml new file mode 100644 index 0000000000..761a32c1ab --- /dev/null +++ b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/invalid-syntax.xml @@ -0,0 +1,13 @@ + + + <${Id}.version>${project.version} + + + + + io.netty + netty-all + 4.1.42.Final + + + diff --git a/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/not-pom.txt b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/not-pom.txt new file mode 100644 index 0000000000..f9df712bcb --- /dev/null +++ b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/not-pom.txt @@ -0,0 +1 @@ +this is not a pom.xml file! diff --git a/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/one-package.xml b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/one-package.xml new file mode 100644 index 0000000000..bbb1359e9d --- /dev/null +++ b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/one-package.xml @@ -0,0 +1,17 @@ + + com.mycompany.app + my-app + 1.0 + + + 3.0 + + + + + org.apache.maven + maven-artifact + 1.0.0 + + + diff --git a/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/parent/pom.xml b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/parent/pom.xml new file mode 100644 index 0000000000..3751df6be3 --- /dev/null +++ b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/parent/pom.xml @@ -0,0 +1,21 @@ + + org.local + parent-pom + 1.0 + + pom + + + org.upstream + parent-pom + 1.0 + + + + + org.dave + dave + 4.0.0 + + + diff --git a/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/transitive.xml b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/transitive.xml new file mode 100644 index 0000000000..52e416a0bc --- /dev/null +++ b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/transitive.xml @@ -0,0 +1,33 @@ + + com.mycompany.app + my-app + 1.0 + + + + + org.transitive + frank + 4.4.4 + + + + + + + org.direct + alice + 1.0.0 + + + org.direct + bob + 2.0.0 + + + org.direct + chris + 3.0.0 + + + diff --git a/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/two-packages.xml b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/two-packages.xml new file mode 100644 index 0000000000..897f648a1e --- /dev/null +++ b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/two-packages.xml @@ -0,0 +1,22 @@ + + com.mycompany.app + my-app + 1.0 + + + 3.0 + + + + + io.netty + netty-all + 4.1.42.Final + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + diff --git a/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/with-dependency-management.xml b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/with-dependency-management.xml new file mode 100644 index 0000000000..1928688e94 --- /dev/null +++ b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/with-dependency-management.xml @@ -0,0 +1,37 @@ + + com.mycompany.app + my-app + 1.0 + + + 3.0 + + + + + io.netty + netty-all + 4.1.9 + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + + + io.netty + netty-all + 4.1.42.Final + + + com.google.code.findbugs + jsr305 + 
3.0.2 + + + + diff --git a/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/with-parent.xml b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/with-parent.xml new file mode 100644 index 0000000000..602b8b877f --- /dev/null +++ b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/with-parent.xml @@ -0,0 +1,54 @@ + + com.mycompany.app + my-app + 1.0 + + + org.local + parent-pom + 1.0 + ./parent/pom.xml + + + + 2.0.0 + + + + + org.alice + alice + 1.0.0 + + + org.bob + bob + ${bob.version} + + + org.chuck + chuck + + + org.frank + frank + + + + + + + org.chuck + chuck + 3.0.0 + + + org.import + import + 1.2.3 + pom + import + + + + diff --git a/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/with-scope.xml b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/with-scope.xml new file mode 100644 index 0000000000..688c6bb7bc --- /dev/null +++ b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/with-scope.xml @@ -0,0 +1,14 @@ + + com.mycompany.app + my-app + 1.0 + + + + junit + junit + 4.12 + runtime + + + diff --git a/internal/lockfilescalibr/language/java/pomxmlnet/testdata/universe/basic-universe.yaml b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/universe/basic-universe.yaml new file mode 100644 index 0000000000..2bf2b32724 --- /dev/null +++ b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/universe/basic-universe.yaml @@ -0,0 +1,60 @@ +system: maven +schema: | + com.google.code.findbugs:jsr305 + 3.0.2 + io.netty:netty-all + 4.1.9 + 4.1.42.Final + junit:junit + 4.12 + org.alice:alice + 1.0.0 + org.apache.maven:maven-artifact + 1.0.0 + org.bob:bob + 2.0.0 + org.chuck:chuck + 3.0.0 + org.dave:dave + 4.0.0 + org.direct:alice + 1.0.0 + org.transitive:chuck@1.1.1 + org.transitive:dave@2.2.2 + org.direct:bob + 2.0.0 + org.transitive:eve@3.3.3 + org.direct:chris + 3.0.0 + org.transitive:frank@3.3.3 + org.eve:eve + 5.0.0 + org.frank:frank + 6.0.0 + org.mine:my.package + 2.3.4 + org.mine:mypackage + 1.0.0 + org.mine:ranged-package + 9.4.35 + 9.4.36 + 9.4.37 + 9.5 + org.slf4j:slf4j-log4j12 + 1.7.25 + org.transitive:chuck + 1.1.1 + 2.2.2 + org.transitive:eve@2.2.2 + 3.3.3 + org.transitive:dave + 1.1.1 + 2.2.2 + 3.3.3 + org.transitive:eve + 1.1.1 + 2.2.2 + 3.3.3 + org.transitive:frank + 3.3.3 + 4.4.4 diff --git a/internal/lockfilescalibr/language/javascript/nodemodules/extractor.go b/internal/lockfilescalibr/language/javascript/nodemodules/extractor.go new file mode 100644 index 0000000000..a965b2fecd --- /dev/null +++ b/internal/lockfilescalibr/language/javascript/nodemodules/extractor.go @@ -0,0 +1,57 @@ +package nodemodules + +import ( + "context" + "io/fs" + "path/filepath" + + "github.com/google/osv-scalibr/extractor" + "github.com/google/osv-scalibr/extractor/filesystem" + "github.com/google/osv-scalibr/extractor/filesystem/language/javascript/packagelockjson" + "github.com/google/osv-scalibr/plugin" + "github.com/google/osv-scalibr/purl" +) + +type Extractor struct { + actualExtractor packagelockjson.Extractor +} + +var _ filesystem.Extractor = Extractor{} + +// Name of the extractor. +func (e Extractor) Name() string { return "javascript/nodemodules" } + +// Version of the extractor. +func (e Extractor) Version() int { return 0 } + +// Requirements of the extractor. 
+func (e Extractor) Requirements() *plugin.Capabilities {
+	return &plugin.Capabilities{}
+}
+
+// FileRequired returns true for .package-lock.json files under node_modules
+func (e Extractor) FileRequired(path string, _ fs.FileInfo) bool {
+	return filepath.Base(filepath.Dir(path)) == "node_modules" && filepath.Base(path) == ".package-lock.json"
+}
+
+// Extract extracts packages from .package-lock.json files by delegating to the package-lock.json extractor.
+func (e Extractor) Extract(ctx context.Context, input *filesystem.ScanInput) ([]*extractor.Inventory, error) {
+	return e.actualExtractor.Extract(ctx, input)
+}
+
+// ToPURL converts an inventory created by this extractor into a PURL.
+func (e Extractor) ToPURL(i *extractor.Inventory) *purl.PackageURL {
+	return e.actualExtractor.ToPURL(i)
+}
+
+// ToCPEs is not applicable as this extractor does not infer CPEs from the Inventory.
+func (e Extractor) ToCPEs(i *extractor.Inventory) []string {
+	return e.actualExtractor.ToCPEs(i)
+}
+
+// Ecosystem returns the OSV ecosystem ('npm') of the software extracted by this extractor.
+func (e Extractor) Ecosystem(i *extractor.Inventory) string {
+	return e.actualExtractor.Ecosystem(i)
+}
+
+var _ filesystem.Extractor = Extractor{}
diff --git a/internal/lockfilescalibr/language/osv/osvscannerjson/extractor.go b/internal/lockfilescalibr/language/osv/osvscannerjson/extractor.go
new file mode 100644
index 0000000000..27de9b2580
--- /dev/null
+++ b/internal/lockfilescalibr/language/osv/osvscannerjson/extractor.go
@@ -0,0 +1,84 @@
+// Package osvscannerjson extracts osv-scanner's json output.
+package osvscannerjson
+
+import (
+	"context"
+	"encoding/json"
+	"fmt"
+	"io/fs"
+
+	"github.com/google/osv-scalibr/extractor"
+	"github.com/google/osv-scalibr/extractor/filesystem"
+	"github.com/google/osv-scalibr/plugin"
+	"github.com/google/osv-scalibr/purl"
+	"github.com/google/osv-scanner/pkg/models"
+)
+
+// Extractor extracts osv packages from osv-scanner json output.
+type Extractor struct{}
+
+// Name of the extractor.
+func (e Extractor) Name() string { return "osv/osvscannerjson" }
+
+// Version of the extractor.
+func (e Extractor) Version() int { return 0 }
+
+// Requirements of the extractor.
+func (e Extractor) Requirements() *plugin.Capabilities {
+	return &plugin.Capabilities{}
+}
+
+// FileRequired never returns true, as this is for the osv-scanner json output.
+func (e Extractor) FileRequired(_ string, _ fs.FileInfo) bool {
+	return false
+}
+
+// Extract extracts packages from osv-scanner json output files passed through the scan input.
+func (e Extractor) Extract(_ context.Context, input *filesystem.ScanInput) ([]*extractor.Inventory, error) {
+	parsedResults := models.VulnerabilityResults{}
+	err := json.NewDecoder(input.Reader).Decode(&parsedResults)
+
+	if err != nil {
+		return nil, fmt.Errorf("could not extract from %s: %w", input.Path, err)
+	}
+
+	packages := []*extractor.Inventory{}
+	for _, res := range parsedResults.Results {
+		for _, pkg := range res.Packages {
+			inventory := extractor.Inventory{
+				Name:    pkg.Package.Name,
+				Version: pkg.Package.Version,
+				Metadata: Metadata{
+					Ecosystem:  pkg.Package.Ecosystem,
+					SourceInfo: res.Source,
+				},
+				Locations: []string{input.Path},
+			}
+			if pkg.Package.Commit != "" {
+				inventory.SourceCode = &extractor.SourceCodeIdentifier{
+					Commit: pkg.Package.Commit,
+				}
+			}
+
+			packages = append(packages, &inventory)
+		}
+	}
+
+	return packages, nil
+}
+
+// ToPURL converts an inventory created by this extractor into a PURL.
+func (e Extractor) ToPURL(_ *extractor.Inventory) *purl.PackageURL { + // TODO: support purl conversion + return nil +} + +// ToCPEs is not applicable as this extractor does not infer CPEs from the Inventory. +func (e Extractor) ToCPEs(_ *extractor.Inventory) []string { return []string{} } + +// Ecosystem returns the OSV ecosystem ('npm') of the software extracted by this extractor. +func (e Extractor) Ecosystem(i *extractor.Inventory) string { + return i.Metadata.(Metadata).Ecosystem +} + +var _ filesystem.Extractor = Extractor{} diff --git a/internal/lockfilescalibr/language/osv/osvscannerjson/extractor_test.go b/internal/lockfilescalibr/language/osv/osvscannerjson/extractor_test.go new file mode 100644 index 0000000000..65289c4d4c --- /dev/null +++ b/internal/lockfilescalibr/language/osv/osvscannerjson/extractor_test.go @@ -0,0 +1,139 @@ +package osvscannerjson_test + +import ( + "context" + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/google/go-cmp/cmp/cmpopts" + "github.com/google/osv-scalibr/extractor" + "github.com/google/osv-scalibr/testing/extracttest" + "github.com/google/osv-scanner/internal/lockfilescalibr/language/osv/osvscannerjson" + "github.com/google/osv-scanner/pkg/models" +) + +func TestExtractor_Extract(t *testing.T) { + t.Parallel() + + tests := []extracttest.TestTableEntry{ + { + Name: "invalid yaml", + InputConfig: extracttest.ScanInputMockConfig{ + Path: "testdata/not-json.txt", + }, + WantErr: extracttest.ContainsErrStr{Str: "could not extract from"}, + }, + { + Name: "empty", + InputConfig: extracttest.ScanInputMockConfig{ + Path: "testdata/empty.json", + }, + WantInventory: []*extractor.Inventory{}, + }, + { + Name: "one package", + InputConfig: extracttest.ScanInputMockConfig{ + Path: "testdata/one-package.json", + }, + WantInventory: []*extractor.Inventory{ + { + Name: "activesupport", + Version: "7.0.7", + Locations: []string{"testdata/one-package.json"}, + Metadata: osvscannerjson.Metadata{ + Ecosystem: "RubyGems", + SourceInfo: models.SourceInfo{ + Path: "/path/to/Gemfile.lock", + Type: "lockfile", + }, + }, + }, + }, + }, + { + Name: "one package with commit", + InputConfig: extracttest.ScanInputMockConfig{ + Path: "testdata/one-package-commit.json", + }, + WantInventory: []*extractor.Inventory{ + { + Locations: []string{"testdata/one-package-commit.json"}, + SourceCode: &extractor.SourceCodeIdentifier{ + Commit: "9a6bd55c9d0722cb101fe85a3b22d89e4ff4fe52", + }, + Metadata: osvscannerjson.Metadata{ + SourceInfo: models.SourceInfo{ + Path: "/path/to/Gemfile.lock", + Type: "lockfile", + }, + }, + }, + }, + }, + { + Name: "multiple packages", + InputConfig: extracttest.ScanInputMockConfig{ + Path: "testdata/multiple-packages-with-vulns.json", + }, + WantInventory: []*extractor.Inventory{ + { + Name: "crossbeam-utils", + Version: "0.6.6", + Locations: []string{"testdata/multiple-packages-with-vulns.json"}, + Metadata: osvscannerjson.Metadata{ + Ecosystem: "crates.io", + SourceInfo: models.SourceInfo{ + Path: "/path/to/Cargo.lock", + Type: "lockfile", + }, + }, + }, + { + Name: "memoffset", + Version: "0.5.6", + Locations: []string{"testdata/multiple-packages-with-vulns.json"}, + Metadata: osvscannerjson.Metadata{ + Ecosystem: "crates.io", + SourceInfo: models.SourceInfo{ + Path: "/path/to/Cargo.lock", + Type: "lockfile", + }, + }, + }, + { + Name: "smallvec", + Version: "1.6.0", + Locations: []string{"testdata/multiple-packages-with-vulns.json"}, + Metadata: osvscannerjson.Metadata{ + Ecosystem: "crates.io", + SourceInfo: models.SourceInfo{ + 
Path: "/path/to/Cargo.lock", + Type: "lockfile", + }, + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + extr := osvscannerjson.Extractor{} + + scanInput := extracttest.GenerateScanInputMock(t, tt.InputConfig) + defer extracttest.CloseTestScanInput(t, scanInput) + + got, err := extr.Extract(context.Background(), &scanInput) + + if diff := cmp.Diff(tt.WantErr, err, cmpopts.EquateErrors()); diff != "" { + t.Errorf("%s.Extract(%q) error diff (-want +got):\n%s", extr.Name(), tt.InputConfig.Path, diff) + return + } + + if diff := cmp.Diff(tt.WantInventory, got, cmpopts.SortSlices(extracttest.InventoryCmpLess)); diff != "" { + t.Errorf("%s.Extract(%q) diff (-want +got):\n%s", extr.Name(), tt.InputConfig.Path, diff) + } + }) + } +} diff --git a/internal/lockfilescalibr/language/osv/osvscannerjson/metadata.go b/internal/lockfilescalibr/language/osv/osvscannerjson/metadata.go new file mode 100644 index 0000000000..45c9e2c966 --- /dev/null +++ b/internal/lockfilescalibr/language/osv/osvscannerjson/metadata.go @@ -0,0 +1,9 @@ +package osvscannerjson + +import "github.com/google/osv-scanner/pkg/models" + +// Metadata holds the metadata for osvscanner.json +type Metadata struct { + Ecosystem string + SourceInfo models.SourceInfo +} diff --git a/internal/lockfilescalibr/language/osv/osvscannerjson/testdata/empty.json b/internal/lockfilescalibr/language/osv/osvscannerjson/testdata/empty.json new file mode 100644 index 0000000000..a9452a2e2b --- /dev/null +++ b/internal/lockfilescalibr/language/osv/osvscannerjson/testdata/empty.json @@ -0,0 +1,3 @@ +{ + "results": [] +} \ No newline at end of file diff --git a/internal/lockfilescalibr/language/osv/osvscannerjson/testdata/multiple-packages-with-vulns.json b/internal/lockfilescalibr/language/osv/osvscannerjson/testdata/multiple-packages-with-vulns.json new file mode 100644 index 0000000000..c861029368 --- /dev/null +++ b/internal/lockfilescalibr/language/osv/osvscannerjson/testdata/multiple-packages-with-vulns.json @@ -0,0 +1,504 @@ +{ + "results": [ + { + "source": { + "path": "/path/to/Cargo.lock", + "type": "lockfile" + }, + "packages": [ + { + "package": { + "name": "crossbeam-utils", + "version": "0.6.6", + "ecosystem": "crates.io" + }, + "vulnerabilities": [ + { + "modified": "2022-08-11T21:55:33Z", + "published": "2022-02-16T22:36:21Z", + "schema_version": "1.4.0", + "id": "GHSA-qc84-gqf4-9926", + "aliases": [ + "CVE-2022-23639" + ], + "summary": "crossbeam-utils Race Condition vulnerability", + "details": "### Impact\n\nThe affected version of this crate incorrectly assumed that the alignment of `{i,u}64` was always the same as `Atomic{I,U}64`. \n\nHowever, the alignment of `{i,u}64` on a 32-bit target can be smaller than `Atomic{I,U}64`.\n\nThis can cause the following problems:\n\n- Unaligned memory accesses\n- Data race\n\nCrates using `fetch_*` methods with `AtomicCell\u003c{i,u}64\u003e` are affected by this issue.\n\n32-bit targets without `Atomic{I,U}64` and 64-bit targets are not affected by this issue.\n32-bit targets with `Atomic{I,U}64` and `{i,u}64` have the same alignment are also not affected by this issue.\n\nThe following is a complete list of the builtin targets that may be affected. 
(last update: nightly-2022-02-11)\n\n- armv7-apple-ios (tier 3)\n- armv7s-apple-ios (tier 3)\n- i386-apple-ios (tier 3)\n- i586-unknown-linux-gnu\n- i586-unknown-linux-musl\n- i686-apple-darwin (tier 3)\n- i686-linux-android\n- i686-unknown-freebsd\n- i686-unknown-haiku (tier 3)\n- i686-unknown-linux-gnu\n- i686-unknown-linux-musl\n- i686-unknown-netbsd (tier 3)\n- i686-unknown-openbsd (tier 3)\n- i686-wrs-vxworks (tier 3)\n\n([script to get list](https://gist.github.com/taiki-e/3c7891e8c5f5e0cbcb44d7396aabfe10))\n\n### Patches\n\nThis has been fixed in crossbeam-utils 0.8.7.\n\nAffected 0.8.x releases have been yanked.\n\n### References\n\nhttps://github.com/crossbeam-rs/crossbeam/pull/781 \n\n### License\n\nThis advisory is in the public domain.", + "affected": [ + { + "package": { + "ecosystem": "crates.io", + "name": "crossbeam-utils", + "purl": "pkg:cargo/crossbeam-utils" + }, + "ranges": [ + { + "type": "SEMVER", + "events": [ + { + "introduced": "0" + }, + { + "fixed": "0.8.7" + } + ] + } + ], + "database_specific": { + "source": "https://github.com/github/advisory-database/blob/main/advisories/github-reviewed/2022/02/GHSA-qc84-gqf4-9926/GHSA-qc84-gqf4-9926.json" + } + } + ], + "severity": [ + { + "type": "CVSS_V3", + "score": "CVSS:3.1/AV:N/AC:H/PR:N/UI:N/S:U/C:H/I:H/A:H" + } + ], + "references": [ + { + "type": "WEB", + "url": "https://github.com/crossbeam-rs/crossbeam/security/advisories/GHSA-qc84-gqf4-9926" + }, + { + "type": "ADVISORY", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2022-23639" + }, + { + "type": "WEB", + "url": "https://github.com/crossbeam-rs/crossbeam/pull/781" + }, + { + "type": "PACKAGE", + "url": "https://github.com/crossbeam-rs/crossbeam" + }, + { + "type": "WEB", + "url": "https://github.com/crossbeam-rs/crossbeam/releases/tag/crossbeam-utils-0.8.7" + }, + { + "type": "WEB", + "url": "https://rustsec.org/advisories/RUSTSEC-2022-0041.html" + } + ], + "database_specific": { + "cwe_ids": [ + "CWE-362" + ], + "github_reviewed": true, + "github_reviewed_at": "2022-02-16T22:36:21Z", + "nvd_published_at": "2022-02-15T19:15:00Z", + "severity": "HIGH" + } + }, + { + "modified": "2022-08-04T13:56:30Z", + "published": "2022-02-05T12:00:00Z", + "schema_version": "1.4.0", + "id": "RUSTSEC-2022-0041", + "aliases": [ + "GHSA-qc84-gqf4-9926", + "CVE-2022-23639" + ], + "summary": "Unsoundness of AtomicCell\u003c*64\u003e arithmetics on 32-bit targets that support Atomic*64", + "details": "## Impact\n\nAffected versions of this crate incorrectly assumed that the alignment of {i,u}64 was always the same as Atomic{I,U}64.\n\nHowever, the alignment of {i,u}64 on a 32-bit target can be smaller than Atomic{I,U}64.\n\nThis can cause the following problems:\n\n- Unaligned memory accesses\n- Data race\n\nCrates using fetch_* methods with AtomicCell\u003c{i,u}64\u003e are affected by this issue.\n\n32-bit targets without Atomic{I,U}64 and 64-bit targets are not affected by this issue.\n\n32-bit targets with Atomic{I,U}64 and {i,u}64 have the same alignment are also not affected by this issue.\n\nThe following is a complete list of the builtin targets that may be affected. 
(last update: nightly-2022-02-11)\n\n- armv7-apple-ios (tier 3)\n- armv7s-apple-ios (tier 3)\n- i386-apple-ios (tier 3)\n- i586-unknown-linux-gnu\n- i586-unknown-linux-musl\n- i686-apple-darwin (tier 3)\n- i686-linux-android\n- i686-unknown-freebsd\n- i686-unknown-haiku (tier 3)\n- i686-unknown-linux-gnu\n- i686-unknown-linux-musl\n- i686-unknown-netbsd (tier 3)\n- i686-unknown-openbsd (tier 3)\n- i686-wrs-vxworks (tier 3)\n\n([script to get list](https://gist.github.com/taiki-e/3c7891e8c5f5e0cbcb44d7396aabfe10))\n\n## Patches\n\nThis has been fixed in crossbeam-utils 0.8.7.\n\nAffected 0.8.x releases have been yanked.\n\nThanks to @taiki-e", + "affected": [ + { + "package": { + "ecosystem": "crates.io", + "name": "crossbeam-utils", + "purl": "pkg:cargo/crossbeam-utils" + }, + "ranges": [ + { + "type": "SEMVER", + "events": [ + { + "introduced": "0.0.0-0" + }, + { + "fixed": "0.8.7" + } + ] + } + ], + "database_specific": { + "categories": [ + "memory-corruption" + ], + "cvss": null, + "informational": "unsound", + "source": "https://github.com/rustsec/advisory-db/blob/osv/crates/RUSTSEC-2022-0041.json" + }, + "ecosystem_specific": { + "affects": { + "arch": [], + "functions": [], + "os": [] + } + } + } + ], + "references": [ + { + "type": "PACKAGE", + "url": "https://crates.io/crates/crossbeam-utils" + }, + { + "type": "ADVISORY", + "url": "https://rustsec.org/advisories/RUSTSEC-2022-0041.html" + }, + { + "type": "WEB", + "url": "https://github.com/crossbeam-rs/crossbeam/pull/781" + } + ] + } + ], + "groups": [ + { + "ids": [ + "GHSA-qc84-gqf4-9926", + "RUSTSEC-2022-0041" + ] + } + ] + }, + { + "package": { + "name": "memoffset", + "version": "0.5.6", + "ecosystem": "crates.io" + }, + "vulnerabilities": [ + { + "modified": "2023-06-21T22:06:29Z", + "published": "2023-06-21T22:06:29Z", + "schema_version": "1.4.0", + "id": "GHSA-wfg4-322g-9vqv", + "summary": "memoffset allows reading uninitialized memory", + "details": "memoffset allows attempt of reading data from address `0` with arbitrary type. This behavior is an undefined behavior because address `0` to `std::mem::size_of\u003cT\u003e` may not have valid bit-pattern with `T`. Old implementation dereferences uninitialized memory obtained from `std::mem::align_of`. Older implementation prior to it allows using uninitialized data obtained from `std::mem::uninitialized` with arbitrary type then compute offset by taking the address of field-projection. 
This may also result in an undefined behavior for \"father\" that includes (directly or transitively) type that [does not allow to be uninitialized](https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html).\n\nThis flaw was corrected by using `std::ptr::addr_of` in \u003chttps://github.com/Gilnaa/memoffset/pull/50\u003e.\n", + "affected": [ + { + "package": { + "ecosystem": "crates.io", + "name": "memoffset", + "purl": "pkg:cargo/memoffset" + }, + "ranges": [ + { + "type": "SEMVER", + "events": [ + { + "introduced": "0" + }, + { + "fixed": "0.6.2" + } + ] + } + ], + "database_specific": { + "source": "https://github.com/github/advisory-database/blob/main/advisories/github-reviewed/2023/06/GHSA-wfg4-322g-9vqv/GHSA-wfg4-322g-9vqv.json" + } + } + ], + "references": [ + { + "type": "WEB", + "url": "https://github.com/Gilnaa/memoffset/issues/24" + }, + { + "type": "WEB", + "url": "https://github.com/Gilnaa/memoffset/pull/50" + }, + { + "type": "PACKAGE", + "url": "https://github.com/Gilnaa/memoffset" + }, + { + "type": "WEB", + "url": "https://rustsec.org/advisories/RUSTSEC-2023-0045.html" + } + ], + "database_specific": { + "cwe_ids": [], + "github_reviewed": true, + "github_reviewed_at": "2023-06-21T22:06:29Z", + "nvd_published_at": null, + "severity": "MODERATE" + } + }, + { + "modified": "2023-07-08T12:30:19Z", + "published": "2023-06-21T12:00:00Z", + "schema_version": "1.4.0", + "id": "RUSTSEC-2023-0045", + "aliases": [ + "GHSA-wfg4-322g-9vqv" + ], + "summary": "memoffset allows reading uninitialized memory", + "details": "memoffset allows attempt of reading data from address `0` with arbitrary type. This behavior is an undefined behavior because address `0` to `std::mem::size_of\u003cT\u003e` may not have valid bit-pattern with `T`. Old implementation dereferences uninitialized memory obtained from `std::mem::align_of`. Older implementation prior to it allows using uninitialized data obtained from `std::mem::uninitialized` with arbitrary type then compute offset by taking the address of field-projection. 
This may also result in an undefined behavior for \"father\" that includes (directly or transitively) type that [does not allow to be uninitialized](https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html).\n\nThis flaw was corrected by using `std::ptr::addr_of` in \u003chttps://github.com/Gilnaa/memoffset/pull/50\u003e.", + "affected": [ + { + "package": { + "ecosystem": "crates.io", + "name": "memoffset", + "purl": "pkg:cargo/memoffset" + }, + "ranges": [ + { + "type": "SEMVER", + "events": [ + { + "introduced": "0.0.0-0" + }, + { + "fixed": "0.6.2" + } + ] + } + ], + "database_specific": { + "categories": [ + "memory-corruption" + ], + "cvss": null, + "informational": "unsound", + "source": "https://github.com/rustsec/advisory-db/blob/osv/crates/RUSTSEC-2023-0045.json" + }, + "ecosystem_specific": { + "affects": { + "arch": [], + "functions": [ + "memoffset::offset_of" + ], + "os": [] + } + } + } + ], + "references": [ + { + "type": "PACKAGE", + "url": "https://crates.io/crates/memoffset" + }, + { + "type": "ADVISORY", + "url": "https://rustsec.org/advisories/RUSTSEC-2023-0045.html" + }, + { + "type": "REPORT", + "url": "https://github.com/Gilnaa/memoffset/issues/24" + } + ] + } + ], + "groups": [ + { + "ids": [ + "GHSA-wfg4-322g-9vqv", + "RUSTSEC-2023-0045" + ] + } + ] + }, + { + "package": { + "name": "smallvec", + "version": "1.6.0", + "ecosystem": "crates.io" + }, + "vulnerabilities": [ + { + "modified": "2023-06-13T20:51:42Z", + "published": "2022-05-24T17:40:21Z", + "schema_version": "1.4.0", + "id": "GHSA-43w2-9j62-hq99", + "aliases": [ + "CVE-2021-25900" + ], + "summary": "Buffer overflow in SmallVec::insert_many", + "details": "A bug in the SmallVec::insert_many method caused it to allocate a buffer that was smaller than needed. It then wrote past the end of the buffer, causing a buffer overflow and memory corruption on the heap. This bug was only triggered if the iterator passed to insert_many yielded more items than the lower bound returned from its size_hint method.\n\nThe flaw was corrected in smallvec 0.6.14 and 1.6.1, by ensuring that additional space is always reserved for each item inserted. 
The fix also simplified the implementation of insert_many to use less unsafe code, so it is easier to verify its correctness.", + "affected": [ + { + "package": { + "ecosystem": "crates.io", + "name": "smallvec", + "purl": "pkg:cargo/smallvec" + }, + "ranges": [ + { + "type": "SEMVER", + "events": [ + { + "introduced": "0.6.3" + }, + { + "fixed": "0.6.14" + } + ] + } + ], + "database_specific": { + "source": "https://github.com/github/advisory-database/blob/main/advisories/github-reviewed/2022/05/GHSA-43w2-9j62-hq99/GHSA-43w2-9j62-hq99.json" + }, + "ecosystem_specific": { + "affected_functions": [ + "smallvec::SmallVec::insert_many" + ] + } + }, + { + "package": { + "ecosystem": "crates.io", + "name": "smallvec", + "purl": "pkg:cargo/smallvec" + }, + "ranges": [ + { + "type": "SEMVER", + "events": [ + { + "introduced": "1.0.0" + }, + { + "fixed": "1.6.1" + } + ] + } + ], + "database_specific": { + "source": "https://github.com/github/advisory-database/blob/main/advisories/github-reviewed/2022/05/GHSA-43w2-9j62-hq99/GHSA-43w2-9j62-hq99.json" + }, + "ecosystem_specific": { + "affected_functions": [ + "smallvec::SmallVec::insert_many" + ] + } + } + ], + "severity": [ + { + "type": "CVSS_V3", + "score": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H" + } + ], + "references": [ + { + "type": "ADVISORY", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2021-25900" + }, + { + "type": "WEB", + "url": "https://github.com/servo/rust-smallvec/issues/252" + }, + { + "type": "PACKAGE", + "url": "https://github.com/servo/rust-smallvec" + }, + { + "type": "WEB", + "url": "https://rustsec.org/advisories/RUSTSEC-2021-0003.html" + } + ], + "database_specific": { + "cwe_ids": [ + "CWE-787" + ], + "github_reviewed": true, + "github_reviewed_at": "2022-06-17T00:20:48Z", + "nvd_published_at": "2021-01-26T18:16:00Z", + "severity": "CRITICAL" + } + }, + { + "modified": "2023-06-13T13:10:24Z", + "published": "2021-01-08T12:00:00Z", + "schema_version": "1.4.0", + "id": "RUSTSEC-2021-0003", + "aliases": [ + "CVE-2021-25900", + "GHSA-43w2-9j62-hq99" + ], + "summary": "Buffer overflow in SmallVec::insert_many", + "details": "A bug in the `SmallVec::insert_many` method caused it to allocate a buffer that was smaller than needed. It then wrote past the end of the buffer, causing a buffer overflow and memory corruption on the heap.\n\nThis bug was only triggered if the iterator passed to `insert_many` yielded more items than the lower bound returned from its `size_hint` method.\n \nThe flaw was corrected in smallvec 0.6.14 and 1.6.1, by ensuring that additional space is always reserved for each item inserted. 
The fix also simplified the implementation of `insert_many` to use less unsafe code, so it is easier to verify its correctness.\n\nThank you to Yechan Bae (@Qwaz) and the Rust group at Georgia Tech’s SSLab for finding and reporting this bug.", + "affected": [ + { + "package": { + "ecosystem": "crates.io", + "name": "smallvec", + "purl": "pkg:cargo/smallvec" + }, + "ranges": [ + { + "type": "SEMVER", + "events": [ + { + "introduced": "0.6.3" + }, + { + "fixed": "0.6.14" + }, + { + "introduced": "1.0.0" + }, + { + "fixed": "1.6.1" + } + ] + } + ], + "database_specific": { + "categories": [ + "memory-corruption" + ], + "cvss": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", + "informational": null, + "source": "https://github.com/rustsec/advisory-db/blob/osv/crates/RUSTSEC-2021-0003.json" + }, + "ecosystem_specific": { + "affects": { + "arch": [], + "functions": [ + "smallvec::SmallVec::insert_many" + ], + "os": [] + } + } + } + ], + "severity": [ + { + "type": "CVSS_V3", + "score": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H" + } + ], + "references": [ + { + "type": "PACKAGE", + "url": "https://crates.io/crates/smallvec" + }, + { + "type": "ADVISORY", + "url": "https://rustsec.org/advisories/RUSTSEC-2021-0003.html" + }, + { + "type": "REPORT", + "url": "https://github.com/servo/rust-smallvec/issues/252" + } + ] + } + ], + "groups": [ + { + "ids": [ + "GHSA-43w2-9j62-hq99", + "RUSTSEC-2021-0003" + ] + } + ] + } + ] + } + ] +} \ No newline at end of file diff --git a/internal/lockfilescalibr/language/osv/osvscannerjson/testdata/not-json.txt b/internal/lockfilescalibr/language/osv/osvscannerjson/testdata/not-json.txt new file mode 100644 index 0000000000..319318e4d7 --- /dev/null +++ b/internal/lockfilescalibr/language/osv/osvscannerjson/testdata/not-json.txt @@ -0,0 +1 @@ +this is not valid json! 
(I think) diff --git a/internal/lockfilescalibr/language/osv/osvscannerjson/testdata/one-package-commit.json b/internal/lockfilescalibr/language/osv/osvscannerjson/testdata/one-package-commit.json new file mode 100644 index 0000000000..044efa3e48 --- /dev/null +++ b/internal/lockfilescalibr/language/osv/osvscannerjson/testdata/one-package-commit.json @@ -0,0 +1,19 @@ +{ + "results": [ + { + "source": { + "path": "/path/to/Gemfile.lock", + "type": "lockfile" + }, + "packages": [ + { + "package": { + "commit": "9a6bd55c9d0722cb101fe85a3b22d89e4ff4fe52" + }, + "vulnerabilities": [], + "groups": [] + } + ] + } + ] +} \ No newline at end of file diff --git a/internal/lockfilescalibr/language/osv/osvscannerjson/testdata/one-package.json b/internal/lockfilescalibr/language/osv/osvscannerjson/testdata/one-package.json new file mode 100644 index 0000000000..ceeca26123 --- /dev/null +++ b/internal/lockfilescalibr/language/osv/osvscannerjson/testdata/one-package.json @@ -0,0 +1,21 @@ +{ + "results": [ + { + "source": { + "path": "/path/to/Gemfile.lock", + "type": "lockfile" + }, + "packages": [ + { + "package": { + "name": "activesupport", + "version": "7.0.7", + "ecosystem": "RubyGems" + }, + "vulnerabilities": [], + "groups": [] + } + ] + } + ] +} \ No newline at end of file diff --git a/internal/lockfilescalibr/translation.go b/internal/lockfilescalibr/translation.go new file mode 100644 index 0000000000..5cebcbf6a9 --- /dev/null +++ b/internal/lockfilescalibr/translation.go @@ -0,0 +1,188 @@ +package lockfilescalibr + +import ( + "context" + "fmt" + "io/fs" + "os" + "sort" + + "github.com/google/osv-scalibr/extractor" + "github.com/google/osv-scalibr/extractor/filesystem" + "github.com/google/osv-scalibr/extractor/filesystem/language/dart/pubspec" + "github.com/google/osv-scalibr/extractor/filesystem/language/dotnet/packageslockjson" + "github.com/google/osv-scalibr/extractor/filesystem/language/erlang/mixlock" + "github.com/google/osv-scalibr/extractor/filesystem/language/golang/gomod" + "github.com/google/osv-scalibr/extractor/filesystem/language/java/gradlelockfile" + "github.com/google/osv-scalibr/extractor/filesystem/language/java/gradleverificationmetadataxml" + "github.com/google/osv-scalibr/extractor/filesystem/language/java/pomxml" + "github.com/google/osv-scalibr/extractor/filesystem/language/javascript/packagelockjson" + "github.com/google/osv-scalibr/extractor/filesystem/language/javascript/pnpmlock" + "github.com/google/osv-scalibr/extractor/filesystem/language/javascript/yarnlock" + "github.com/google/osv-scalibr/extractor/filesystem/language/php/composerlock" + "github.com/google/osv-scalibr/extractor/filesystem/language/python/pdmlock" + "github.com/google/osv-scalibr/extractor/filesystem/language/python/pipfilelock" + "github.com/google/osv-scalibr/extractor/filesystem/language/python/poetrylock" + "github.com/google/osv-scalibr/extractor/filesystem/language/python/requirements" + "github.com/google/osv-scalibr/extractor/filesystem/language/r/renvlock" + "github.com/google/osv-scalibr/extractor/filesystem/language/ruby/gemfilelock" + "github.com/google/osv-scalibr/extractor/filesystem/language/rust/cargolock" + + scalibrfs "github.com/google/osv-scalibr/fs" +) + +var lockfileExtractors = []filesystem.Extractor{ + // conanlock.Extractor{}, + packageslockjson.Extractor{}, + mixlock.Extractor{}, + pubspec.Extractor{}, + gomod.Extractor{}, + pomxml.Extractor{}, + gradlelockfile.Extractor{}, + gradleverificationmetadataxml.Extractor{}, + packagelockjson.Extractor{}, + 
pnpmlock.Extractor{}, + yarnlock.Extractor{}, + composerlock.Extractor{}, + pipfilelock.Extractor{}, + pdmlock.Extractor{}, + poetrylock.Extractor{}, + requirements.Extractor{}, + renvlock.Extractor{}, + gemfilelock.Extractor{}, + cargolock.Extractor{}, +} + +var lockfileExtractorMapping = map[string]string{ + "pubspec.lock": "dart/pubspec", + "pnpm-lock.yaml": "javascript/pnpmlock", + "yarn.lock": "javascript/yarnlock", + "package-lock.json": "javascript/packagelockjson", + "pom.xml": "java/pomxml", + "buildscript-gradle.lockfile": "java/gradlelockfile", + "gradle.lockfile": "java/gradlelockfile", + "verification-metadata.xml": "java/gradleverificationmetadataxml", + "poetry.lock": "python/poetrylock", + "Pipfile.lock": "python/Pipfilelock", + "pdm.lock": "python/pdmlock", + "requirements.txt": "python/requirements", + "Cargo.lock": "rust/Cargolock", + "composer.lock": "php/composerlock", + "mix.lock": "erlang/mixlock", + "renv.lock": "r/renvlock", + "packages.lock.json": "dotnet/packageslockjson", + // "conan.lock": "cpp/conanlock", + "go.mod": "go/gomod", + "Gemfile.lock": "ruby/gemfilelock", +} + +// ExtractWithExtractor attempts to extract the file at the given path with the extractor passed in +func ExtractWithExtractor(ctx context.Context, localPath string, ext filesystem.Extractor) ([]*extractor.Inventory, error) { + info, err := os.Stat(localPath) + if err != nil { + return nil, err + } + + return extractWithExtractor(ctx, localPath, info, ext) +} + +// Extract attempts to extract the file at the given path +// +// Args: +// - localPath: the path to the lockfile +// - extractAs: the name of the lockfile format to extract as (Using OSV-Scanner V1 extractor names) +// +// Returns: +// - []*extractor.Inventory: the extracted lockfile data +// - error: any errors encountered during extraction +// +// If extractAs is not specified, then the function will attempt to +// identify the lockfile format based on the file name. +// +// If no extractors are found, then ErrNoExtractorsFound is returned. +func Extract(ctx context.Context, localPath string, extractAs string) ([]*extractor.Inventory, error) { + info, err := os.Stat(localPath) + if err != nil { + return nil, err + } + + if extractAs != "" { + return extractAsSpecific(ctx, extractAs, localPath, info) + } + + output := []*extractor.Inventory{} + extractorFound := false + + for _, ext := range lockfileExtractors { + if ext.FileRequired(localPath, info) { + extractorFound = true + + inv, err := extractWithExtractor(ctx, localPath, info, ext) + if err != nil { + return nil, err + } + + output = append(output, inv...) 
+ } + } + + if !extractorFound { + return nil, ErrNoExtractorsFound + } + + sort.Slice(output, func(i, j int) bool { + if output[i].Name == output[j].Name { + return output[i].Version < output[j].Version + } + + return output[i].Name < output[j].Name + }) + + return output, nil +} + +// Use the extractor specified by extractAs string key +func extractAsSpecific(ctx context.Context, extractAs string, localPath string, info fs.FileInfo) ([]*extractor.Inventory, error) { + for _, ext := range lockfileExtractors { + if lockfileExtractorMapping[extractAs] == ext.Name() { + return extractWithExtractor(ctx, localPath, info, ext) + } + } + + return nil, fmt.Errorf("%w, requested %s", ErrExtractorNotFound, extractAs) +} + +func extractWithExtractor(ctx context.Context, localPath string, info fs.FileInfo, ext filesystem.Extractor) ([]*extractor.Inventory, error) { + si, err := createScanInput(localPath, info) + if err != nil { + return nil, err + } + + inv, err := ext.Extract(ctx, si) + if err != nil { + return nil, fmt.Errorf("(extracting as %s) %w", ext.Name(), err) + } + + for i := range inv { + inv[i].Extractor = ext + } + + return inv, nil +} + +func createScanInput(path string, fileInfo fs.FileInfo) (*filesystem.ScanInput, error) { + reader, err := os.Open(path) + if err != nil { + return nil, err + } + + si := filesystem.ScanInput{ + FS: os.DirFS("/").(scalibrfs.FS), + Path: path, + Root: "/", + Reader: reader, + Info: fileInfo, + } + + return &si, nil +} diff --git a/internal/lockfilescalibr/translation_test.go b/internal/lockfilescalibr/translation_test.go new file mode 100644 index 0000000000..14c5f72e1d --- /dev/null +++ b/internal/lockfilescalibr/translation_test.go @@ -0,0 +1,23 @@ +package lockfilescalibr + +import ( + "testing" +) + +func TestLockfileScalibrMappingExists(t *testing.T) { + t.Parallel() + + for _, target := range lockfileExtractorMapping { + found := false + for _, ext := range lockfileExtractors { + if target == ext.Name() { + found = true + break + } + } + + if !found { + t.Errorf("Extractor %v not found.", target) + } + } +} diff --git a/internal/output/__snapshots__/sarif_test.snap b/internal/output/__snapshots__/sarif_test.snap index f306395f03..ec9062ba3e 100755 --- a/internal/output/__snapshots__/sarif_test.snap +++ b/internal/output/__snapshots__/sarif_test.snap @@ -62,7 +62,7 @@ } } ], - "version": "1.9.0" + "version": "1.9.1" } }, "artifacts": [ @@ -149,7 +149,7 @@ "informationUri": "https://github.com/google/osv-scanner", "name": "osv-scanner", "rules": [], - "version": "1.9.0" + "version": "1.9.1" } }, "results": [] @@ -170,7 +170,7 @@ "informationUri": "https://github.com/google/osv-scanner", "name": "osv-scanner", "rules": [], - "version": "1.9.0" + "version": "1.9.1" } }, "results": [] @@ -191,7 +191,7 @@ "informationUri": "https://github.com/google/osv-scanner", "name": "osv-scanner", "rules": [], - "version": "1.9.0" + "version": "1.9.1" } }, "results": [] @@ -212,7 +212,7 @@ "informationUri": "https://github.com/google/osv-scanner", "name": "osv-scanner", "rules": [], - "version": "1.9.0" + "version": "1.9.1" } }, "results": [] @@ -233,7 +233,7 @@ "informationUri": "https://github.com/google/osv-scanner", "name": "osv-scanner", "rules": [], - "version": "1.9.0" + "version": "1.9.1" } }, "results": [] @@ -254,7 +254,7 @@ "informationUri": "https://github.com/google/osv-scanner", "name": "osv-scanner", "rules": [], - "version": "1.9.0" + "version": "1.9.1" } }, "results": [] @@ -275,7 +275,7 @@ "informationUri": "https://github.com/google/osv-scanner", 
"name": "osv-scanner", "rules": [], - "version": "1.9.0" + "version": "1.9.1" } }, "results": [] @@ -296,7 +296,7 @@ "informationUri": "https://github.com/google/osv-scanner", "name": "osv-scanner", "rules": [], - "version": "1.9.0" + "version": "1.9.1" } }, "results": [] @@ -317,7 +317,7 @@ "informationUri": "https://github.com/google/osv-scanner", "name": "osv-scanner", "rules": [], - "version": "1.9.0" + "version": "1.9.1" } }, "results": [] @@ -338,7 +338,7 @@ "informationUri": "https://github.com/google/osv-scanner", "name": "osv-scanner", "rules": [], - "version": "1.9.0" + "version": "1.9.1" } }, "results": [] @@ -359,7 +359,7 @@ "informationUri": "https://github.com/google/osv-scanner", "name": "osv-scanner", "rules": [], - "version": "1.9.0" + "version": "1.9.1" } }, "results": [] @@ -380,7 +380,7 @@ "informationUri": "https://github.com/google/osv-scanner", "name": "osv-scanner", "rules": [], - "version": "1.9.0" + "version": "1.9.1" } }, "results": [] @@ -401,7 +401,7 @@ "informationUri": "https://github.com/google/osv-scanner", "name": "osv-scanner", "rules": [], - "version": "1.9.0" + "version": "1.9.1" } }, "results": [] @@ -422,7 +422,7 @@ "informationUri": "https://github.com/google/osv-scanner", "name": "osv-scanner", "rules": [], - "version": "1.9.0" + "version": "1.9.1" } }, "results": [] @@ -443,7 +443,7 @@ "informationUri": "https://github.com/google/osv-scanner", "name": "osv-scanner", "rules": [], - "version": "1.9.0" + "version": "1.9.1" } }, "results": [] @@ -501,7 +501,7 @@ } } ], - "version": "1.9.0" + "version": "1.9.1" } }, "artifacts": [ @@ -631,7 +631,7 @@ } } ], - "version": "1.9.0" + "version": "1.9.1" } }, "artifacts": [ @@ -743,7 +743,7 @@ } } ], - "version": "1.9.0" + "version": "1.9.1" } }, "artifacts": [ @@ -809,7 +809,7 @@ } } ], - "version": "1.9.0" + "version": "1.9.1" } }, "artifacts": [ @@ -875,7 +875,7 @@ } } ], - "version": "1.9.0" + "version": "1.9.1" } }, "artifacts": [ @@ -941,7 +941,7 @@ } } ], - "version": "1.9.0" + "version": "1.9.1" } }, "artifacts": [ @@ -1061,7 +1061,7 @@ } } ], - "version": "1.9.0" + "version": "1.9.1" } }, "artifacts": [ @@ -1272,7 +1272,7 @@ } } ], - "version": "1.9.0" + "version": "1.9.1" } }, "artifacts": [ @@ -1410,7 +1410,7 @@ "informationUri": "https://github.com/google/osv-scanner", "name": "osv-scanner", "rules": [], - "version": "1.9.0" + "version": "1.9.1" } }, "results": [] @@ -1468,7 +1468,7 @@ } } ], - "version": "1.9.0" + "version": "1.9.1" } }, "artifacts": [ @@ -1634,7 +1634,7 @@ } } ], - "version": "1.9.0" + "version": "1.9.1" } }, "artifacts": [ @@ -1845,7 +1845,7 @@ } } ], - "version": "1.9.0" + "version": "1.9.1" } }, "artifacts": [ @@ -1983,7 +1983,7 @@ "informationUri": "https://github.com/google/osv-scanner", "name": "osv-scanner", "rules": [], - "version": "1.9.0" + "version": "1.9.1" } }, "results": [] @@ -2004,7 +2004,7 @@ "informationUri": "https://github.com/google/osv-scanner", "name": "osv-scanner", "rules": [], - "version": "1.9.0" + "version": "1.9.1" } }, "results": [] @@ -2025,7 +2025,7 @@ "informationUri": "https://github.com/google/osv-scanner", "name": "osv-scanner", "rules": [], - "version": "1.9.0" + "version": "1.9.1" } }, "results": [] @@ -2046,7 +2046,7 @@ "informationUri": "https://github.com/google/osv-scanner", "name": "osv-scanner", "rules": [], - "version": "1.9.0" + "version": "1.9.1" } }, "results": [] @@ -2104,7 +2104,7 @@ } } ], - "version": "1.9.0" + "version": "1.9.1" } }, "artifacts": [ @@ -2187,7 +2187,7 @@ } } ], - "version": "1.9.0" + "version": "1.9.1" } }, 
"artifacts": [ @@ -2253,7 +2253,7 @@ } } ], - "version": "1.9.0" + "version": "1.9.1" } }, "artifacts": [ @@ -2319,7 +2319,7 @@ } } ], - "version": "1.9.0" + "version": "1.9.1" } }, "artifacts": [ @@ -2385,7 +2385,7 @@ } } ], - "version": "1.9.0" + "version": "1.9.1" } }, "artifacts": [ @@ -2452,7 +2452,7 @@ } } ], - "version": "1.9.0" + "version": "1.9.1" } }, "artifacts": [ @@ -2536,7 +2536,7 @@ } } ], - "version": "1.9.0" + "version": "1.9.1" } }, "artifacts": [ @@ -2637,7 +2637,7 @@ } } ], - "version": "1.9.0" + "version": "1.9.1" } }, "artifacts": [ @@ -2720,7 +2720,7 @@ } } ], - "version": "1.9.0" + "version": "1.9.1" } }, "artifacts": [ @@ -2786,7 +2786,7 @@ } } ], - "version": "1.9.0" + "version": "1.9.1" } }, "artifacts": [ diff --git a/internal/remediation/fixtures/santatracker/osv-scanner.toml b/internal/remediation/fixtures/santatracker/osv-scanner.toml index b399bb4c28..db94704b26 100644 --- a/internal/remediation/fixtures/santatracker/osv-scanner.toml +++ b/internal/remediation/fixtures/santatracker/osv-scanner.toml @@ -1,191 +1,4 @@ [[PackageOverrides]] -name = "@babel/traverse" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "@grpc/grpc-js" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "acorn" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "ajv" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "ansi-regex" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "braces" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "browserslist" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "dat.gui" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "get-func-name" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "glob-parent" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "google-closure-library" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "html-minifier" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "json-schema" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "json5" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "lodash" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "minimatch" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "minimist" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "node-fetch" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally 
vulnerable test project" - -[[PackageOverrides]] -name = "node-forge " -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "node-forge" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "path-parse" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "pathval" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "postcss" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "protobufjs" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "qs" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "request" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "semver" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "terser" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "tough-cookie" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "ws" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "y18n" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "yargs-parser" ecosystem = "npm" ignore = true reason = "This is an intentionally vulnerable test project" diff --git a/internal/remediation/fixtures/zeppelin-server/osv-scanner.toml b/internal/remediation/fixtures/zeppelin-server/osv-scanner.toml index 250f7b7530..d84c70b89e 100644 --- a/internal/remediation/fixtures/zeppelin-server/osv-scanner.toml +++ b/internal/remediation/fixtures/zeppelin-server/osv-scanner.toml @@ -1,143 +1,4 @@ [[PackageOverrides]] -name = "com.fasterxml.jackson.core:jackson-databind" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "com.google.guava:guava" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "com.jcraft:jsch" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "com.nimbusds:nimbus-jose-jwt" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "io.atomix:atomix" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "io.netty:netty-codec" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "io.netty:netty-handler" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "org.apache.commons:commons-compress" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - 
-[[PackageOverrides]] -name = "org.apache.commons:commons-configuration2" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "org.apache.directory.api:api-ldap-model" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "org.apache.mina:mina-core" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "org.apache.pdfbox:pdfbox" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "org.apache.shiro:shiro-core" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "org.apache.shiro:shiro-web" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "org.apache.thrift:libthrift" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "org.bouncycastle:bcprov-jdk15on" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "org.codehaus.jackson:jackson-mapper-asl" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "org.eclipse.jgit:org.eclipse.jgit" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "org.glassfish.jersey.core:jersey-common" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "com.google.code.gson:gson" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "commons-collections:commons-collections" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "org.apache.httpcomponents:httpclient" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "org.eclipse.jetty:jetty-webapp" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "org.quartz-scheduler:quartz" ecosystem = "Maven" ignore = true reason = "This is an intentionally vulnerable test project" diff --git a/internal/semantic/fixtures/all-versions.txt b/internal/semantic/fixtures/all-versions.txt deleted file mode 100644 index 87837c03dd..0000000000 --- a/internal/semantic/fixtures/all-versions.txt +++ /dev/null @@ -1,2530 +0,0 @@ -# Loaded 6470 vulnerabilities (including withdrawn, last updated Tue, 01 Mar 2022 16:48:44 GMT) -0.2.3 -1.8.6 -3.3.1.11 -10.0.12 -7.0.0-M1 -13.4.7 -2.20.4 -2.6.18 -13.1.2 -0.56.1 -21.11.3 -9.22.1 -7.1.3 -7.0.98 -11.0.0-beta.6 -4.2.0.32 -5.15.13 -2.12.5 -0.9.5.3 -9.0.2 -4.8.2 -6.3.5.3 -1.31.0 -4.0.4 -2.8.11 -1.13.6.1 -5.2.19 -0.11.7 -3.3.11 -10.0.0-M10 -0.17.2 -3.3.23 -0.9.8 -98.1.210 -0.2.8 -5.15.1 -86.0.241 -5.0.19 -0.23.0-RC1 -14.4.5 -0.10 -0.0.53 -1.3.2.1 -1.6.37 -1.0.9 -6.1.1 -2.3.5 -1.12.3 -0.85.0 -1.3.19 -1.4.15 -7.7.24 -5.6.4 -2.31.0 -4.2.5.2 -0.9.16 -4.8.10 -4.17.14 -2.2.11 -0.14.6 -2.14.1 -4.3a1 -3.6 -4.1.12 -2.22.3 -2.13.19 -1.6.6.0 -7.4.0 -1.53 -1.22.0 -9.13.2 -5.2.4.3 -1.9.8 
-9.0.12 -4.18.2 -6.5.3 -2.8.52 -5.1.12 -1.0.5 -1.13.7 -6.20.14 -14.4.7 -2.11.7 -6.0.22 -3.1.18 -2.3.11 -0.7 -4.1.2 -2.2.6 -4.3.29 -4.0.11 -2.11.0 -1.13.0 -8.5.34 -3.26.0 -4.3.0.4 -0.2.2.1 -2.13.4 -1.5.10 -0.10.3 -5.6.3 -13.0.2 -1.4.44-liberty-2 -4.11 -8.5.0 -0.31.0 -1.13.6 -0.16.2 -3.2-2020.10.28.23.06 -4.10.3 -20.12.3 -7.0.36 -1.1.0 -6.2.6 -10.0.17 -2.6.10.1 -2.2.10.RELEASE0.5 -2.2.26 -3.2.8 -3.0.11 -2.0.38 -0.8.7 -4.0.15 -1.3.0 -0.35.1 -1.11.15 -3.23.6 -10.4.14 -6.6.1 -3.19.0 -2.40.0 -0.12.19 -6.1.3.2 -5.2.6 -5.16.0 -4.2 -2.4 -4.0.8 -2.0.23 -1.9.3 -0.37.2 -2.5.22 -4.2.2 -3.4.7 -1.7.0-beta.8 -8.6.0 -1.7.11 -8.5.2 -1.2.2.1 -13.0.0 -1.0.0-rc9 -4.0.9 -5.1b1 -1.6.1 -9.3.1 -20.12.0 -7.0.27 -5.2.0 -6.11.4 -7.5.1 -4.5.13 -11.30.1 -2.1.16 -0.8.2 -4.80.0 -1.6.34 -11.1.0 -2.2.16 -20200315 -0.15.14 -3.12.0 -0.1.6 -2.7.16 -19.180.0 -3.2.0 -16.2.0 -0.95 -1.9.7 -0.1.9 -1.52.0 -5.4.0 -4.2.16 -9.0.31 -5.2.8 -0.6.5 -6.5.0 -1.0.2.1 -1.3.23 -1.6.5 -0.7.6 -16.0.0.0rc1 -3.5.8 -4.1.17 -9.10 -2.10.8 -3.10.8 -12.8 -7.0.2.2 -7.6.2 -3.8.5 -12.6.3 -4.4.23 -1.17.0 -8.11.1 -2.11.0.M3 -1.11.28 -0.30.3 -5.7.2 -3.17 -0.11.6 -0.4.1 -16.1.1 -5.0.16 -0.34.0 -0.65.2 -1.7.8.3 -3.1.21 -12.0.0 -0.8.13 -2.8.11.3 -1.0.29 -3.27.19 -4.0.0-alpha -2.0.16 -2.3.1 -19.38.7 -8.40.0 -0.29 -3.7.14 -8.5.7 -2.10.3 -4.9.1 -6.6.3 -0.17.3 -9.3.27.v20190418 -3.2.0.3 -7.7.20 -5.3.14 -2.5.16 -1.4.12 -12.5 -7.6.53 -5.10.0 -3.4.26 -6.4.6.1 -10.3.1 -8.7.40 -5.18.0 -1.12.5 -8.11.0 -3.6.2 -7.0.2 -7.62.0 -17.0.0 -3.0.0-beta.1 -1.22.2 -4.4.1 -2.8.10 -6.4.1.1 -0.0.8 -4.5.1 -0.5.4 -5.3.7 -3.2.10 -2.0.0-rc8 -0.1.5 -7.0.90 -4.5 -6.2.3 -0.0.13 -0.43.0 -0.14.14 -1.63 -3.6.12 -2.2.20 -2.8.37 -2.11.0.M1 -8.5.50 -9.0.3 -5.9.0 -23.0.2.Final -0.8.3 -4.1.0.25 -2.3.16 -6.2.5 -4.17.12 -0.0.25 -8.3.5 -4.13.0 -3.1.31 -2.5.0 -0.0.1 -3.10 -18.0.0 -5.8.17 -0.8.16 -7.0.1 -0.1.15 -3.1.42 -0.3.0 -9.3.24.v20180605 -1.11.9 -5.8.3 -3.31 -5.3.15 -5.1.6 -3.2.11 -10.0.1 -19.5.0 -4.4.19 -4.8.6 -8.5.49 -9.0.4 -0.11.91 -10.1.0-M1 -5.3 -18.0.7 -9.0.40 -11.0.6 -6.1.2 -2.0.19 -#20.04.0 -9.4.43 -4.4.0.0 -0.98 -11.0.4 -0.31.12 -111.11 -11.10.11 -1.28.0 -4.7.7 -2.7.5 -0.18.6 -8.12.3 -0.26.1 -5.2.4 -0.9.7 -3.3.4 -4.5.10 -6.6.6 -5.5.0.Alpha1 -2.5.15 -4.8.3 -1.0.0.3 -2.6.5 -2.6.17 -3.19.1 -1.56 -2.5.13 -3.20.9 -0.30.0 -1.0.19 -7.9 -14.0.6 -3.6.3 -2.0.17 -7.30.4 -6.1.2.1 -2.12.21 -3.9.3 -6.0.9 -3.3.3 -0 -2.6.7.1 -7.0.99 -0.1.8 -1.67 -1.1.7 -2.0.2 -3.1.5 -2.8.11.1 -1.11.20 -5.2.3 -6.3.5.2 -2.1.19 -8.7.41 -2.3.15 -2.10.11 -9.3.20.v20170531 -0.3.7-beta -3.4.3 -0.11.3 -0.15.9 -3.2.2 -1.8.2-beta.4 -1.11.23 -0.23.0-M1 -9.0.45 -0.4 -2.0.8.1 -11.5.0 -1.4.21 -10.0.8 -2.5.9 -3.0.5 -0.8.6 -1.2.6.1 -4.44.4 -4.7.9 -2.0.0.v20210717-M17 -4.15.0 -1.12.0-RC1 -0.18.0 -1.10.2 -0.26 -1.7.4 -8.1.2 -1.0.0-beta.6 -2.9.5 -3.3.5 -1.15.0 -4.79.0 -3.9.2 -6.0.1 -2.2.21 -1.9.13 -1.12.7 -0.12.17 -0.12.36 -2.4.11 -1.9.10 -2.9.9.2 -1.18.1 -2.4.13 -1.12.0 -0.12.7 -6.0.3.2 -3.4.0.12 -2022.1.8 -13.7-rc-1 -0.17.1 -4.72.2 -9.3.9 -7.7.0 -9.2.27.v20190403 -9.4.21.v20190926 -6.14.6 -1.25.9 -17.5.0 -1.11.27 -0.2.12 -3.7.11 -2.6.8 -1.0.0-M1 -0.7.24 -6.2.1 -1.16.0 -5.2.2 -2.8.8 -2.13.1 -2.11.8 -0.6.16 -2.14.11 -2.19.1 -3.12.2 -1.3.9 -3.11.0 -0.4.6 -7.0.3 -13.1 -1.5.8 -1.5.2.1 -0.26.2 -15.0 -2.7.51 -0.33.1 -1.23.0 -0.4.5 -4.0.0-beta -1.4.19 -1.10 -2.6.14 -1.24.2 -1.7.5.0 -3.1.4 -0.0.7 -6.20.26 -19.0.9 -1.21.3 -4.0.6 -1.2.3 -1.9.1 -0.18.3 -7.0.100 -1.10.22 -19.0.4 -19.10.23 -8.5.4 -9.0.58 -2.7.9.1 -12.10.5 -7.1.0 -16.4.2 -#4.3.04 -0.17.4 -11.1.3 -2.0.18 -2.2.13 -1.0.14 -8.2.13 -2.0.6 -2.0.14 -1.6.22 -1.4.9 -0.27.4 -10.4.18 -7.0.86 -8.0.7 
-2.63.0 -1.4.14 -1.16.3 -10.0.7 -12.10.6 -0.193.0 -0.3.0M7 -1.8.2 -0.19.5 -10.0.0-M1 -9.0.6 -5.6.2 -1.7.24 -8.5.59 -2.2.15 -2.14.0 -5.2.5 -22.1.0 -2.5.1 -4.3.2 -0.18 -8.2.3 -9.0.36 -2.0.0-beta.3 -2.7.9.5 -1.19.4 -6.3.4.1 -0.125.0 -1.17.58 -3.0.0 -8.5.28 -2.5.10.1 -7.0.41 -1.7.16 -2.16.0 -6.0.18 -7.5.15.2 -1.9.17 -0.21.7 -5.3.8 -2.31.1 -3.4.0.4 -0.107.6 -3.1.39 -2.9.18 -0.22.0 -3.1.14 -1.2.14 -1.3.3 -3.4.9 -2.8.11.2 -3.9.1 -4.14.0 -3.4.1.15 -2.814.0 -3.2.17 -2.4.3 -2.5 -1.11.8 -0.3.6 -5.6.0 -5.4 -3.5.17 -1.61 -0.8.4 -2.4.1 -0.11.1 -2.18.0 -2.20.1 -2.7.9.4 -10.0.2 -4.0-beta2 -1.14.2 -2.1 -5.0.13 -2.3.12 -2.49.0 -2.5.11 -2.2.33 -1.1b1 -5.0rc1 -2.3.33 -2.21.2 -1.11.22 -8.1.4 -1.16.7 -2.3.0 -0.4.4 -1.0.12 -4.27.11 -6.2.0 -3.5.5 -1.11.1 -4.4.52 -12.10.4 -1.15.1 -4.4.4 -7.30.6 -8.0.0-M1 -5.0.3 -0.12.1 -2.10.7 -1.10.6 -3.11.4 -2.0.31 -3.42.6 -5.8.1 -1.5.3 -4.2.13 -10.0.0-beta.0 -3.6.7 -3.4.14 -5.4.5 -6.0.2 -3.3.8 -0.38.0-beta.2 -2.0.12 -10.2.4 -2.1.59 -4.4.56 -1.6.3 -1.6.14 -2.11.6 -1.7.10 -0.7.1 -1.0.8 -1.3.14 -1.19.6 -2.8.0 -1.9.11 -1.0.15 -39.1.0 -3.9.5 -6.13.0 -6.1.0.Final -0.6.18 -8.0.0 -7.0.7 -4.2.12 -2.11.0.M4 -2.9.10.5 -0.12.3 -3.24.0 -0.11.2 -0.1.20 -0.5.1 -1.0.7 -1.4.2-SNAPSHOT -6.5.4 -9.4.0 -1.0.246 -20.10.7 -14.6.8 -0.36.0 -0.2.14 -1.8.12 -7.0.5 -9.4.12.v20180830 -9.4.22.v20191022 -1.1.25 -1.8.2-beta5 -3.6.6 -6.13.3 -2.7.1 -1.26.4 -1.7.0.0 -4.0.3 -0.2.2.2 -5.1.0 -10.1.0 -1.0.13 -9.4.30.v20200611 -7.17.0 -0.25.0 -1.1.100 -5.4.18 -2.12.0 -3.15 -0.9.2 -0.20.8 -9.5.25 -3.4.8 -0.49.0 -3.13.0 -8.31.3 -2.9.10.7 -0.0.24 -1.13.10 -8.8.53370 -10.2.0 -4.0.0-milestone2 -1.26 -0.9 -2.5.5 -3.0.20 -7.6.5 -5.21.4 -7.0.91 -19.0.0 -6.3.5.1 -1.19.7 -0.2.2 -11.4 -1.6.23 -8.5.75 -0.9.11 -0.17.6 -2.6.7 -2.0.3 -0.0.2 -4.1a1 -1.13.2 -1.5.1 -4.10.1 -337 -8.24.0 -7.5.26 -13.2-rc-1 -1.26.7 -0.13.7 -7.5.7 -12.6.4 -1.0.31 -2.19.4 -4.17.19 -3.2.1 -1.10.9 -3.2.18 -16.0.1 -2.0.0-alpha8 -7.12.1 -3.1.1 -0.16.0 -3.18.0 -0.19.2 -0.4.2 -4.3.19 -8.1.1 -9.0.44 -0.0.43 -0.2.17 -3.2.15 -1.4.6 -1.10.8 -9.4.27 -3.4.48 -9.3.0 -9.4.23.v20191118 -4.9.2 -0.1.0 -4.3.12 -8.0.51 -5.7.1 -8.2.10 -2.0-beta-2 -7.0 -4.3.1 -0.7.0 -2.10.1 -21.10.1 -11.0.0.beta3 -0.12.4 -1.18.19 -10.0.0-alpha.0 -5.0.372 -0.79.1 -7.74 -4.1.3 -5.4.2 -3.5.3 -0.4.8 -1.10.14 -0.7.1-beta -2.12.2 -4.1.42 -5.2.20 -8.8.5 -3.2.4 -2.0.25 -2.27.1 -4.1.59.Final -0.6.4 -3.3.10 -14.5.3 -3.92 -1.5.4 -8.4.2 -5.2 -7.14.0 -3.0.0rc10 -2.320 -3.11.10 -3.0.26 -2.3.4 -2.3.10 -2.12.19 -0.4.7 -1.2.31 -0.3.8 -0.137.0 -9.1.6 -0.33.3 -1.9.6.1 -3.6.8 -6.0.4.2 -0.11.5 -2.0.15 -5.0.12 -8.0.19 -1.10.11 -0.15 -6.2.57 -1.25.8 -1.0.471 -21.11.29.1 -0.45.3 -6.4.1 -2.6.6 -4.0.0-milestone1 -12.6.7 -1.13.12 -6.6.0 -3.0.0-beta.17.5 -1.18.4 -9.0.28 -1.12.8 -1.14.7 -7.4.4 -3.0.0rc0 -1.11.0-RC1 -3.7 -3.2.7 -1.6.2 -1.5.25.1 -2.4.8 -0.44.2 -2.21.0 -2.6 -0.59.0 -3.1.43 -6.8 -5.6.6 -3.12.6 -8.5.1 -1.0.467 -0.2.25 -5.3.18 -1.3 -5.8.4 -10.4.2 -1.43.0 -1.8.14 -13.0 -0.33.5 -1.11.4 -2.25.1 -1.8.18 -4.4.13 -0.14.0 -4.7 -5.65.7 -3.3.7 -2.17.2 -2.1.1 -8.5.5 -5.1.6.2 -1.0.0a12 -10.4.1 -111.15 -3.2.12 -4.10.0 -12.0.3 -1.0.190 -1.19.8 -1.2.6 -0.2019.416 -1.14.11 -1.0.0-M23 -4.2.15 -2.10.6 -6.6.2 -0.5.5 -4.44.0 -2.0.10 -20.0.0.Final -3.93 -4.0.0-beta.7 -3.2.22 -2.1.28 -3.0.0-beta.17.8 -4.3 -13.3RC1 -9.4.17.v20190418 -2.0.0-alpha.5 -6.5.1 -0.17.0 -2.15.1 -14.4.3 -19.0.5 -2018.10.9 -1.9 -16.1.2 -1.11.12 -0.64.1 -6.4.3.1 -3.2.13 -2.11.0.M5 -2.2.7 -4.1.45 -20.0.0 -19.4.13 -11.3.1 -0.33.6 -1.11.11 -4.16.5 -4.5.7.Final -3.9.4 -4.0.37 -2.9.10.4 -19.1.1 -20.10.0 -9.2.55826 -1.3.1 -0.4.3 -20.0.9 -14.6.2 -6.1.4.1 
-4.0.0-alpha1 -0.0.5 -0.46 -14.4.4 -21.11 -1.6.7 -4.9.7 -6.0.3.7 -4.0.5 -6.4.9 -5.2.4.5 -9.4.6 -4.2.7.1 -3.35.1 -10.4.0 -106.11.8-alpha.0.13 -8.5.56 -6.8.13 -7.0.9 -2.2.2 -1.4 -6.1.6 -1.0.11 -9.3.54 -6.0.10 -0.7.15 -4.0.0.21 -8.3.7 -5.9.2 -4.1.22 -9.10.3 -7.2.10.fp6 -3.4.6 -21.3.0 -1.2.11 -1.17.3 -4.4.6 -2.42.0 -0.1.25 -4.2.8 -5.3.0 -4.3.14 -1.9.2 -2.9.9 -2.3.14 -6.13.8.2 -0.6.6 -2.2.4 -3.22.0 -0.1.7 -1.0.0-alpha.4 -4.0.0-alpha2 -2.7.4 -4.2.21 -4.3.4 -12.0 -4.0-beta1 -0.1.4 -1.28.1 -3.11.1.Final -2.15.2 -6.1.7 -3.2.16 -1.2.7 -1.12.2. -9.4.32 -5.7.0 -9.0.54 -2.9.6 -2.2.10.RELEASE -10.6 -1.1.11 -2.6.9 -3.3.14 -11.1 -1.7.7 -0.9.0 -4.5.4 -13.6 -4.1.0-rc.1 -4.5.14 -0.16.6 -6.20.42 -6.1.0.0 -4.0.38 -2.1.10 -3.3.6 -9.0.8 -6.6.4 -1.16.0-snapshot.1 -7.1.10.fp18 -1.5.6 -1.6.10 -25.0.0 -1.13.4 -2.12.1 -5.0.7 -2.4.5 -3.9.11 -1.0.16 -1.6.7.2 -27.1.1 -1.16.8 -0.20.0-incubating -5.5 -0.6.2 -0.3.0M2 -3.3.9 -5.39 -2.5.26 -3.5.55 -13.0.6 -10.0.3 -1.7.3 -1.10.1 -2.5.12 -1.11.18 -1.14.6 -0.4.20 -13.3.0 -13.2RC1 -1.7 -5.2.1 -18.0.6 -42.1.0 -8.0.18 -4.6 -2.8.23 -0.3.0-beta.22 -3.3.22 -2.11.12 -2.3.19 -1.1.6-alpha.6 -10.0.11 -1.0.0-rc95 -0.15.6 -8.11.2 -3.6.0 -2.23.2 -2.9.3 -20.0.6 -6.0.4.6 -0.39.7 -0.7.7 -4.1.1 -1.16 -8.2.4 -1.24.0 -1.0.0-rc91 -10.2.10 -2.1.4 -0.54.0 -1.18.6 -1.10.15 -2.1.1.Final -2.1.58 -7.31.1 -10.1.4 -0.9.1 -8.4.3 -6.13.8.1 -5.0.0 -7.0.107 -2.12.4 -5.5.4 -2.20.0 -9.0.16 -3.0.0-alpha4 -6.4.0 -3.0.0-beta.9 -14.4.10 -0.5.8 -0.32.10 -3.0.12 -2.10.4 -23.25.1 -1.6.30 -0.2.7 -0.22.3 -3.2.21 -2.8.19 -3.1.11 -3.8.4 -1.7.28 -4.1.14 -1.13.1 -7.0.85 -3.16.0 -11.6 -6.3.0 -8.5.32 -2.68.0 -7.1.1 -1.3.16 -1.21.2 -#2019.03.6.1 -2.9.10.8 -1.8.0 -7.0.23 -4.13.1 -10.0.14 -2.2.24 -4.1.71.Final -4.1.7 -3.12.5 -3.1.61 -1.19.11 -2.13.11 -1.8.8 -5.0.11 -3.15.7 -301 -1.11.2 -9.0.35 -3.15.4 -0.0.20 -2.0.206 -5.3.11 -0.0.11 -5.0.1 -1.14 -8.8.2 -3.0.6 -2.7.6 -2.7.17 -6.1.7.2 -0.98.12.1 -2.3.6 -4.9.0 -2.22.0 -1.1.82 -6.0.2.2 -4.1.60.Final -5.7.6 -2.3.3 -3.4.1 -4.6.1 -2.1.11 -4.4.0 -8.0.53 -1.20 -0.0.0 -2.3.34 -6.6.5 -1.11.19 -0.13.0 -4.5.0 -3.9 -7.10.2 -0.22.0-M1 -10.0.19 -1.5.18 -0.25.4 -1.12.2 -6.1.5 -4.2.4 -8.5.40 -9.0.20 -4.1.46 -9.5.23 -1.4.14-jdk7 -2.26.2 -2.16.8 -4.17.0 -5.1.1 -1.14.4 -18.1.2 -4.1.13 -0.33.0 -9.5.29 -5.7.3 -1.10.3 -0.10.6 -8.5.61 -17.0.11 -1.9.5 -2.25.3 -9.2.7 -0.1.2 -5.1.13 -0.18.26 -3.7.9 -2021.9.2 -6.3.2.1 -6.4.16 -1.7.8.2 -2.11.14 -3.0.10 -0.4.11 -7.0.88 -7.6 -8.4.0 -4.30.5 -4.31.1 -6.7.0 -4.0.0-milestone5 -9.1.0 -4.6.5 -3.5 -0.6.29 -3.9.6 -0.2.0-prerelease.20200714185213 -3.15.1 -3.1.0 -0.3.16 -20190301.0.0 -3.26.5 -1.30 -2.7 -4.10.4 -1.5.7 -0.20.2 -1.6.2.2 -1.12.1 -0.7.19 -7.5.15.1 -4.0.12 -5.1a1 -0.11.0 -1.3.5 -1.11.21 -7.11.0 -1.44.0 -3.0.17 -1.7.1 -3.1.16 -1.8.10 -18.4.1 -0.14.10 -8.8.10 -16.0.0 -2.8.50 -21.7.0 -1.0.466 -6.2.12 -4.1.5 -6.0.4 -6.3.1.1 -4.1.61.Final -4.17.21 -7.2.3 -5.3.2 -0.21.29 -5.7.11 -2.1.29 -9.0.37 -2.11.3 -5.6.1 -2.0.21 -1.16.14 -1.9.25 -1.0.0-rc.9 -7.1.17 -1.6 -6.3.1 -1.2.5.1 -1.4.18 -5.9.3 -21.1 -6.3.14 -3.1.9 -3.0.19 -1.11.0 -4.1.6 -0.8.1 -0.0.18 -2.24.0 -0.21.24 -0.9.5.1 -8.14.1 -13.7.0 -1.6.4 -1.0.4.1 -0.7.29 -2.3.22 -0.9.5.4 -1.23.8 -0.32.6 -5.7.19 -4.3.0 -1.4.4 -0.2.0-prerelease.20201019174008 -0.1.0-beta.13.2 -1.16.15 -3.0.8 -2.9.2 -2.4.2 -0.21.2 -11.0.2 -19.4.6 -1.1.118 -7.24.0 -0.12.34 -2021.10.0 -3.1.2 -3.5.0 -0.3 -1.3.12 -6.18.2 -3.1.7 -1.2.0 -1.6.7.1 -0.9.5 -4.5.7 -8.0.1 -2.23.0 -1.0.0-111 -9.2.5 -2.4.24 -4.4.3 -1.11.24 -0.23.12 -5.2.10 -0.3.9 -1.2.2 -5.17.3 -2.10.5.1 -1.6.0 -1.64 -4.3.3 -8.13.0 -2.2.14 -2.3.17 -2.0.202 -1.1.6 -9.2.25.v20180606 -2.6.7.3 
-1.2.21 -10.0 -2.19.3 -4.6.14 -0.0.6 -6.20.12 -4.14 -8.6.2 -4.3.11 -7.0.35 -2.2.23 -42.3.0 -8.5.51 -4.3.7 -2.0.0-alpha.0 -2.8.15 -0.25.1 -5.1.2 -4.11.0 -0.12.12 -1.5 -7.0.108 -0.7.23 -5.15.6 -1.20.7 -1.10.23 -2.5.4 -3.9.0 -3.6.5 -0.1.7a -2.1.0 -3.7.0 -3.5.4 -4.7.8 -10.0.6 -2.9.10.3 -8.0.16 -20.0.13 -5.0 -2.2.22 -3.8.2 -1.6.12 -5.3.1 -8.5.63 -0.13.1 -9.6 -0.20.1 -10.1.2 -3.0.0-alpha.0 -3.11.2 -1.24 -1.2 -4.3.15 -0.21.3 -16.4.0 -1.0.319 -6.8.8 -0.21.17 -1.9.9 -2.0.0a1 -3.2.14 -3.2.5 -4.8.0 -0.12.0 -7.2.4 -5.11.0 -12.10.9 -3.0.0-beta1 -0.5.11 -4.0.0-RC3 -1.19.3 -2.4.6 -7.3.1 -5.4.4 -1.17.1 -3.0.13 -1.33.0 -1.9.16 -6.1.5.Final -16.3.0 -1.4.7 -2.9.10.1 -1.12.9 -2.0.43 -3.0.16 -6.0.0 -10.4.6 -1.0.0-85 -1.0.469 -2.4.21 -7.9.2 -4.11.4 -2.4.0-RC1 -13.0.8 -2.8.2 -2.7.8 -2.13.0 -1.8.37 -1.15.3 -20200912 -2.0.13 -1.4.353.15 -9.5.0 -0.14.15 -19.0.1 -12.0.5 -2.27.0 -0.0.19 -1.10.5 -9.4.4 -2.6.10 -3.1.12 -3.0.18 -1.4.11 -3.1.13 -4.7.3 -9.4.37 -0.34.1 -4.9.16 -1.7.6 -0.6.3 -1.0.10 -2.3.7.1 -1.0.0-rc3 -4.2.10 -6.12.3 -1.6.6.4 -2.22.1 -3.5.1 -1.0.0-beta.9 -1.24.1 -0.0.0-20200220183623-bac4c82f6975 -0.9.9 -2.10.0 -3.1.3 -2.1.5 -3.13.1 -0.10.8 -1.2.8 -2.14.14 -4.4.46 -9.0.43 -4.1.14.1 -4.1.44 -2.0.33 -10.0-alpha1 -5.2.6.2 -0.4.21 -10.4.10 -5.2.11 -0.73.1 -4.1.8 -0.3.7 -4.12.0 -4.1.10 -8.2.0 -4.26.2 -2.0GA -8.5.64 -1.3.8 -2.3.2 -3.3.2 -5.0.4 -3.0.15 -1.6.41 -0.24.0 -5.0.8 -4.2.1 -4.1.4 -9.0.5 -4.3.23 -9.0.0-M5 -0.1.16 -1.7.21 -1.5.2 -3.20.0 -2.17.0 -7.6.52 -4.0.10 -6.1.4.6 -0.4.9 -1.1 -1.2.4 -1.5.5 -6.4.2 -1.3.28 -19.4.8 -0.1.0-beta.15.1 -1.3.11 -8.9.9 -4.0 -0.44.0 -9.1.3 -0.40.0 -3.0.7 -0.18.1 -2.1.3 -0.21.8 -1.11.13 -2.1.38 -2.9.15 -2.0.1 -2.0.8 -1.27.0 -1.11.3 -1.0.987 -2017.12.7.4 -1.7.26 -2.2.1.0 -1.11.7 -9.0.30 -1.24.4 -0.26.0 -2.319.2 -0.50.0 -2.2 -8.22.1 -3.2.22.5 -4.0.0.beta8 -0.15.3 -0.20.20 -0.1.19 -3.8.8 -6.8.3 -15.1.1 -0.10.0 -4.0.60 -0.3.18 -7.3.3 -3.16.1 -10.0.16 -1.1.2 -16.3.3 -0.23.6-1 -3.7.9.Final -2.8.1 -6.1.0 -2.2.10 -8.1 -9.4.33 -4.0.0-milestone4 -1.2.5 -1.12 -0.2.5 -20.0.4 -0.13.10 -11.0.0 -3.6.4 -0.19.0 -12.10.7 -1.21.0 -0.21.4 -2.6.1 -4.9.6 -1.0.470 -8.1.6 -2.3.9 -5.7.8 -1.16.1 -17.2.3 -4.3.0.0 -7.2.0 -1.15 -6.0.5 -1.7.5 -12.0.11 -0.9.17 -0.6.15 -3.2.20 -1.6.16 -4.4.0.1 -3.11.12 -1.6.8 -1.12.4 -2.1.8 -6.0.3.3 -8.75.0 -3.11.5 -5.4.3 -4.2.5.1 -1.6.0-beta1 -0.32.0 -5.2.4.6 -6.2-rc-1 -5.5.0 -5.2.4.2 -17.9.2 -10.0.0-beta.1 -1.19.9 -0.2.23 -5.6.5 -12.4.1 -10.1.3 -1.0.2 -11.0.0-beta.0 -1.4.8 -3.1.29 -1.0.1.1 -2.7.7 -2.5.3 -3.24 -1.1.4 -2.1.0pre1 -1.2.9 -8.13.8 -3.8.1 -1.12.6 -3.5.7 -4.15.1 -20211018.1 -2.9.4 -4.1 -6.2.2 -5.2.27 -1.25.2 -0.2.15 -4.3b1 -9.2.22.v20170606 -14.0.0 -9.4.24.v20191120 -3.8.39 -8.9 -12.10.0 -1.18 -5.5.5 -4.4.18 -1.10.10 -2.7.0.0 -2.0.24 -13.6-rc-1 -1.7.12 -3.0.3 -2.2.8 -5.1.3 -6.0.3.5 -1.0.1.18 -5.5.13 -0.1.3 -4.2.11.1 -3.7.13 -9.0.0-M4 -0.10.1 -2.21.1 -0.12 -5.1.18 -2.16 -1.7.2 -7.0.94 -8.3.0 -0.9.74 -1.3.1.1 -5.4.7 -8.5.65 -1.4.2 -4.4.2 -2.39.0-2.7.0 -3.0.3.3 -16.1.0 -1.3.13 -0.99.0 -4.29.3 -10.2.7 -8.5.57 -9.2.6 -1.0.3 -0.23 -9.0.17 -5.15.9 -10.1.0-M6 -7.0.0 -2.0.7 -8.0.13 -7.11.2 -10.0.4 -1.2.1 -0.23.1 -77 -8.9.0 -4.5.3.Final -19.10.0 -0.23.4 -3.11 -4.1rc3 -0.7.34 -4.7.2 -20.12.6 -0.2.4 -2.0.2.0 -2.13.2 -8.1.0 -4.11.5 -2.6.3 -3.6.10 -8.8.11 -1.10.7 -6.0.3.1 -5.2.1-rc1 -11.0.1 -4.9.11 -5.5.3 -1.33.2 -9.4.41 -0.19 -2.3.18 -4.3.6 -1.8.19 -0.2.6 -3.7.7 -4.7.0 -2.19.6 -0.9.4 -2.7.3 -1.3.21 -15.0.5 -2.16.11 -22.2.0 -42.2.13 -2.7.22 -1.14.5 -1.23.1 -10.0.10 -1.0.0-beta.59 -2.10.2 -2.7.0 -2.0.0b2 -0.1.11 -1.6.43 -2015.4.18-a -16.8.2 -1.1.21 -3.8 -0.27.0 -0.21.27 -0.11.8 
-0.0.26 -16.3.1 -0.8.12 -0.12.2 -2.1.2 -1.11 -4.9.10 -7.30.3 -1.4.16 -0.4.36 -5.3.16 -3.10.0-beta -0.36.1 -7.1 -5.0.5 -9.2.0 -2.0.0-201 -1.14.8 -0.22.5 -1.0.473 -2.4.4 -1.14.1 -0.0.1-1 -8.10.0 -5.11 -1.3.4 -4.0.0-beta7 -2.4.17 -1.16.2 -1.10.4 -0.2.13 -5.6.10 -21.0.3 -10.0.0-M9 -3.2.6 -2.7.2 -1.1.5 -4.8.4 -12.2.1 -1.26.0 -3.21.0 -6.1.8 -6.8.15 -6.1.9 -2.0.35 -3.22.1 -12.9 -7.4.6 -2.11.13 -8.9.19 -3.4.4 -2.9.10 -1.14.0 -3.21.2 -27.0.0 -5.3.10 -0.14.3 -7.1.4 -4.1.0 -3.5.3.24 -0.22.0-RC1 -0.9.23 -0.27.1 -0.53.1 -2.9.1 -2.21.5 -2.9.10.2 -1.11.29 -2.5.14 -2.34 -0.8.20 -0.7.9 -0.38.0 -4.8.1 -4.7.17.4 -3.4.11 -10.0.0-M7 -3.0.4 -4.0.0 -2.15.0 -4.2.6 -1.11.10 -12.3.2 -1.9.3.6 -5.1.4 -1.8.7 -0.10.16 -0.6.13 -2.8.12 -9.4.11.v20180605 -19.7.0 -4.44.3 -2.37.0 -4.3.20 -0.2.11 -1.0.0-rc.0 -5.6.9 -1.1.1 -2.7.26 -1.0.6 -2.17.6 -14.3.0 -1.8.4 -1.20.11 -3.1.15 -1.18.5 -2.92.0 -7.6.51 -3.8.3 -4.16.2 -6.25.1 -2.8.11.5 -0.27.2 -6.5.2 -8.5.72 -3.17.0 -5.15.5 -2.4.19 -0.4.0 -2.16.5 -4.2.7 -2.2.9 -0.3.20 -6.7.2 -#2018.06.0 -2.0.0 -4.0.2 -7.0.87 -2.26.1 -9.0.0.M1 -2.4.16 -3.1.63 -3.3.24 -2.16.1 -0.40.2 -1.47.1 -9.1.13 -2.0.5 -4.2.0 -7.7.22 -3.0.24 -7.1.2 -5.16.1 -4.1.0.beta1 -0.3.2 -0.1.1 -4.0.0.beta1 -0.9.20 -1.9.15 -0.27.11 -3.5.15 -1.4.0 -0.4.15 -9.0.29 -5.2.24 -3.7.1 -2.8.6 -1.1.9 -2.3.8 -1.58.2 -0.18.2 -7.0.79 -11.21.0 -1.8.1 -0.0.10 -4.3.9 -5.3.12 -4.1.43 -2.9.13 -0.7.14 -2.330 -13.4.3 -12.0.8 -3.3.0 -1.3.352.12 -2.23.1 -8.5.9 -4.9.5 -4.7.4 -1.0.475 -5.3.9 -6.0.3.4 -13.10.0 -2.0.9 -0.16.4 -2.0.45 -3.10.0 -2020.4.0 -21.12.22.1 -10.0.0-M6 -1.1.19 -0.10.9 -9.0.0M1 -12.10.11 -20.0.1 -4.4.35 -2.9.0 -1.7.0 -5.2.1.1 -19.2.1 -1.21.1 -8.9.2 -111.9 -2.0 -2.7.12 -12.3.3 -1.0 -3.1 -2.12 -9.4.6.v20170531 -0.8.11 -1.0.0-rc1 -8.0.17 -12.1.0 -2.3.24 -2.3 -0.35.3 -1.9.4 -2.9.12 -2.0.4 -1.19 -8.5.48 -6.1.3 -7.0.8 -0.9.13 -0.7.4 -0.15.0 -15.0.1 -13.10.3 -9.5.12 -1.0.990 -2.5.24.1 -13.5RC1 -4.2.3 -5.2.14 -6.0.20.Final -2.1.27 -1.22 -2.10.0.0 -4.4.14 -1.66 -13.4.1 -4.0.1 -1.15.4 -3.4.35 -0.6.1 -5.0.7.1 -2.13 -0.6.10 -3.18.2 -1.3.7 -3.2 -9.2.9.v20150224 -0.11.17 -1.5.9 -0.2.0 -3.3.0-13 -1.7.15 -11.3.2 -0.14.8 -4.0.7 -1.7.8 -0.3.1 -5.2.9 -3.0.22 -0.38.12-beta.1 -1.18.0 -1.20.0 -0.0.15 -2.28 -8.7.42 -42.3.3 -3.4.0 -0.9.6 -0.22.2 -3.0 -0.5.9 -2.2.1 -1.11.6 -1.4.10 -0.12.6 -9.0.41 -4.6.4 -12.7 -#19.04.15 -5.3.3 -3.8.11 -5.3.6 -0.119.0.beta1 -2.6.2 -7.0.104 -1.13.5 -1.0a1 -0.2.9 -2.8.9 -6.1.11 -11.10.5 -12.6.8 -5.5.1 -0.3.12 -5.7.7 -3.2.22.1 -1.3.31 -2.3.7 -7.4.1 -1.11.17 -9.5.28 -3.0.367 -8.18 -1.2.18 -0.3.9.7 -3.0.0-beta.18.4 -3.12.1 -3.5.18 -2.13.8 -15.0.0 -0.4.16 -0.16.41 -9.0.46 -1.0.0 -3.1.8 -1.13 -8.0.2 -11.26.3 -7.2.2 -1.65 -4.9.8 -7.0.0.0 -2.0.0-beta.1 -42.2.5 -1.19.1 -8.12.0 -4.3.18 -1.0.421 -3.11.8 -4.16.1 -3.14.4 -5.20.21 -9.0.39 -2.7.13 -5.4.1 -3.2.0.4 -4.7.1 -3.23.11 -4.4.7 -4.5.3 -2.0.0-209 -8.7.0 -0.63.0 -12.0.9 -8.9.6 -1.3.2 -9.2.2 -21.1.0 -2.11.4 -1.7.9 -0.25.5 -6.2.4 -2.3.1.0 -5.8.0 -9.0.0-M2 -6.18.35 -3 -2.0.11 -1.0.472 -0.1.12 -2.2.3 -7.5.0 -1.21 -6.0.6 -20.0.2 -1.19.0 -11.0.0.beta1 -3.0.2-RC1 -2.1.7 -5.8.11 -3.8.6 -0.13.4 -0.23.0 -2.2.27 -3.0.21 -0.3.5 -5.15.14 -2.25.4 -3.0.14 -5.0.0.1 -8.5.31 -5.8.2 -9.0.1 -3.34.0-01 -21.11.2 -2.7.15 -4.1.0.beta2 -5.0.7.2 -3.8.2.Final -2.11.2 -1.4.17 -9.3.25.v20180904 -1.1.8 -4.1.48 -9.5.6 -3.0.1 -1.10.0 -1.17.6 -5.0.6 -7.0.28 -2.1.210 -3.91 -2.7.18 -4.6.2 -0.1.29 -11.1.1 -1.8.16 -3.0.9 -1.6.9 -9.3.26.v20190403 -10.0.0.beta3 -0.42.1 -19.4.15 -1.21.5 -10.2.6 -0.20.3-incubating -2.4.0 -0.6 -1.6.6 -3.7.4 -4.3.8 -9.4.35.v20201120 -2.6.7.4 -3.15.0 -7.6.0 -8.0.8 -0.3.89 -3.7.2 -4.3.17 
-4.2.11 -2.0.20 -2.97.1 -11.10.6 -4.9.3 -4.16.0 -11.0.6.Final -1.0.4 -16.2.1 -5.2.22 -1.0.0-rc.7 -1.17.2 -3.0.27 -0.8.0 -6.3.2 -8.5.60 -1.9.21 -10.0.0 -4.0.2.8 -5.58.2 -9.0.0.pre1 -2.2.5 -4.4.5 -5.2.18 -19.2 -9.0.9 -0.40.3 -42.3.2 -3.2.22.2 -7.2.1 -4.17.11 -3.5.6 -1.10.12 -8.5.66 -0.23.2 -1.4.1 -2.7.9 -10.0.0.beta1 -1.0.1 -5.1.5 -4.4.16 -1.1.17 -0.30.1 -42.2.25 -0.5.3 -0.14.2 -4.5.2 -3.5.14 -0.7.22 -14.0.3 -0.47.5 -3.2.22.3 -3.0.2 -1.0.0a20 -2.1.15 -10.0.5 -8.5.3 -3.3 -3.1.10 -0.5.2 -4.1.11 -2.18.2 -1.8.5 -2.1.9 -0.35.2 -5.1.10 -11.1.4 -1.7.25 -2.6.12 -1.0.100 -1.50.1 -5.7.5 -9.5.4 -111.14 -1.6.44 -2.2.0 -2.8.5 -2.9.8 -3.10.3 -3.10.2 -2.4.10 -0.71.0 -18.10.8 -#19.04.5 -9.18.2 -1.13.11 -5.2.4.4 -3.4.7.28 -6.1.4.2 -2.15.5 -3.4.5 -1.25.0 -2.3.13 -2.7.21 -2.17.5 -4.2.9 -2.8.4 -5.3.5 -1.1.0.1 -5.0.10 -0.1.30 -3.6.1 -1.9.24 -5.15.12 -10.2.9 -1.8 -11.2.0 -0.33.10 -3.8.0 -3.7.8 -1.65.1 -0.9.3 -3.2.3 -2.9.9.1 -0.12.10 -5.0.9 -3.23.1 -3.0.0-alpha2 -4.1.68.Final -1.23 -8.0.4 -4.0.14 -2.1.90 -22.1 -0.2.1 -5.0.22 -#20.04.13 -3.3.beta1 -5.21.0 -1.4.5 -1.15.5 -4.17.5 -2.14.2 -8.6.6 -20.3.0 -1.0.468 -0.62.3 -1.4.3 -4.1.14.2 -6.13.4 -10.0.0-M5 -3.3.12 -1.4.365.10 -21.12.1 -1.0.474 -1.0.910 -1.41.1 -1.8.15 -1.34.0 -2.9.7 -0.21.1 -11.0.3 -6.0 -1.53.3 -4.1.1.26 -4.30.2 -4.17.13 -0.1.0-beta.14 -5.3.4 -1.11.5 -1.4.13 -0.6.0 -0.1 -1.3.6 -2.2.0.Final -5.0.2 -4.3.16 -3.12.4 -8.5.55 -0.8.5 -2.2.18 -0.97.0 -8.0.0-beta.0 -13.6RC1 -6.3.3 -4.9.4 -1.15.2 -6.0.0.0 -9.0.0-M3 -2.1.6 -0.0.4 -9.0.0-beta.0 -1.12.12 -5.1.6.1 -#2019.11.09 -9.1 -0.7.3 -8.5.38 -0.13.3 -4.11.7 -5.0.0.0 -0.15.1 -1.13.8 -5.3.35 -1.6.11 -4.6.0 -7.16.3 -5.4.24.Final -0.15.2 -0.21.0 -0.29.2 -1.11.14 -2.2.25 -3.1.6 -5.2.2.1 -4.2.11.3 -0.7.2 -1.26.5 -10.4.19 -2.6.0 -0.0.3 -0.5.0 -3.5.2 -3.4.2 -9.2.28.v20190418 -2.10 -4.6.3 -9.0.0-M1 -7.5.11 -1.9.0 -2.1.13 -4.3.5 -9.0.10 -5.7.4 -2.1.12 -10.1.1 -1.60 -1.9.20 -8.0 -3.6.15 -9.3.2 -0.24.1 -0.0.9 -1.6.15 -7.14.3 -3.2.19 -3.3.1 -0.0.14 -1.7.13 -1.19.15 -1.1.3 -3.19.2 -9.0.0 -2.3.32 -0.10.2 -1.74.0 -9.1.1 -5.1 -0.15.23 -0.125.1 -4.0.0-milestone3 -4.6.7 -0.34.9 -2.9.10.6 -0.3.4 -9.5.17 -2020.8.1 -1.5.0 -8.29 -85.3.130 -12.10.3 -2.0.34 -1.4.359.31 -4.1.2.30 -8.3.2 -0.6.7 -2.12.3 -3.10.1 -9.4.16.v20190411 -0.3.0-beta.83 -1.10.13 -2.4.7 -1.2.65 -6.0.4.1 -1.8.3 -0.7.30 -9.0.0.M5 -2.8.3 -2.5.2 -0.3.3 -0.7.5 -1.1.10 -6.0.3 -2.0.0-rc.12 -9.4.39 -1.9.18 -13.2 -4.9.18 -3.7.3 -1.7.14 -3.8.0.Final -4.4.15 -9.4.1208 -1.51 -9.5.20 -6.8.6 -14.0.1 -4.73.1 diff --git a/internal/semantic/fixtures/alpine-versions.txt b/internal/semantic/fixtures/alpine-versions.txt index 22ed323d48..53bc7e3d2a 100644 --- a/internal/semantic/fixtures/alpine-versions.txt +++ b/internal/semantic/fixtures/alpine-versions.txt @@ -47,6 +47,14 @@ 1.2_svn1 > 1.2_csv2 1.2_cvs1 > 1.2_blah +# technically invalid, but "apk version -t" still accepts them +.1 < 1 +1 > .1 +.2 > .1 +a < b +c > b +c = c + # from https://raw.githubusercontent.com/alpinelinux/apk-tools/master/tests/version.data 2.34 > 0.1.0_alpha # todo: this might be invalid? 
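The fixture lines added above cover version strings that are technically invalid but still ordered by `apk version -t` (e.g. `.1 < 1`, `a < b`), and the `parseAlpineNumberComponents` guard added further down in `version-alpine.go` lets the parser tolerate a missing leading number instead of panicking. A minimal sketch of how those comparisons could be exercised — an illustration only, assuming the file lives inside the osv-scanner module (since `internal/semantic` is not importable externally) and that `semantic.Version` exposes the `CompareStr(string) int` method shown for `AlpineVersion` in this diff:

```go
package main

import (
	"fmt"

	"github.com/google/osv-scanner/internal/semantic"
	"github.com/google/osv-scanner/pkg/models"
)

func main() {
	// ".1" is technically not a valid Alpine version, but "apk version -t"
	// still orders it before "1"; the new fixture lines expect the same.
	v := semantic.MustParse(".1", models.Ecosystem("Alpine"))

	// A negative result from CompareStr corresponds to the "<" expectation
	// in alpine-versions.txt (".1 < 1"); zero would mean "=", positive ">".
	fmt.Println(v.CompareStr("1") < 0) // expected: true
}
```

The same pattern would apply to the purely alphabetic cases (`a < b`, `c = c`) now accepted by the fixture, since with no leading digits the comparison falls through to the suffix handling.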
diff --git a/internal/semantic/parse.go b/internal/semantic/parse.go index d555f2c1ab..b507ce806e 100644 --- a/internal/semantic/parse.go +++ b/internal/semantic/parse.go @@ -22,36 +22,36 @@ func MustParse(str string, ecosystem models.Ecosystem) Version { func Parse(str string, ecosystem models.Ecosystem) (Version, error) { //nolint:exhaustive // Using strings to specify ecosystem instead of lockfile types switch ecosystem { - case "npm": + case "Alpine": + return parseAlpineVersion(str), nil + case "ConanCenter": return parseSemverVersion(str), nil + case "CRAN": + return parseCRANVersion(str), nil case "crates.io": return parseSemverVersion(str), nil case "Debian": return parseDebianVersion(str), nil - case "Ubuntu": - return parseDebianVersion(str), nil - case "Alpine": - return parseAlpineVersion(str), nil - case "RubyGems": - return parseRubyGemsVersion(str), nil - case "NuGet": - return parseNuGetVersion(str), nil - case "Packagist": - return parsePackagistVersion(str), nil case "Go": return parseSemverVersion(str), nil case "Hex": return parseSemverVersion(str), nil case "Maven": return parseMavenVersion(str), nil - case "PyPI": - return parsePyPIVersion(str), nil - case "Pub": + case "npm": return parseSemverVersion(str), nil - case "ConanCenter": + case "NuGet": + return parseNuGetVersion(str), nil + case "Packagist": + return parsePackagistVersion(str), nil + case "Pub": return parseSemverVersion(str), nil - case "CRAN": - return parseCRANVersion(str), nil + case "PyPI": + return parsePyPIVersion(str), nil + case "RubyGems": + return parseRubyGemsVersion(str), nil + case "Ubuntu": + return parseDebianVersion(str), nil } return nil, fmt.Errorf("%w %s", ErrUnsupportedEcosystem, ecosystem) diff --git a/internal/semantic/parse_test.go b/internal/semantic/parse_test.go index 513af3d312..99f8083ce7 100644 --- a/internal/semantic/parse_test.go +++ b/internal/semantic/parse_test.go @@ -14,8 +14,7 @@ func TestParse(t *testing.T) { ecosystems := lockfile.KnownEcosystems() - // todo: remove once CRAN is supported by lockfile - ecosystems = append(ecosystems, "CRAN") + ecosystems = append(ecosystems, "Alpine", "Debian", "Ubuntu") for _, ecosystem := range ecosystems { _, err := semantic.Parse("", models.Ecosystem(ecosystem)) @@ -37,8 +36,7 @@ func TestMustParse(t *testing.T) { ecosystems := lockfile.KnownEcosystems() - // todo: remove once CRAN is supported by lockfile - ecosystems = append(ecosystems, "CRAN") + ecosystems = append(ecosystems, "Alpine", "Debian", "Ubuntu") for _, ecosystem := range ecosystems { semantic.MustParse("", models.Ecosystem(ecosystem)) diff --git a/internal/semantic/version-alpine.go b/internal/semantic/version-alpine.go index 722b989b4d..9030e51812 100644 --- a/internal/semantic/version-alpine.go +++ b/internal/semantic/version-alpine.go @@ -223,6 +223,10 @@ func (v AlpineVersion) CompareStr(str string) int { func parseAlpineNumberComponents(v *AlpineVersion, str string) string { sub := cachedregexp.MustCompile(`^((\d+)\.?)*`).FindString(str) + if sub == "" { + return str + } + for i, d := range strings.Split(sub, ".") { v.components = append(v.components, alpineNumberComponent{ value: convertToBigIntOrPanic(d), diff --git a/internal/semantic/version-pypi.go b/internal/semantic/version-pypi.go index ab3f1af76e..8363a1e3c4 100644 --- a/internal/semantic/version-pypi.go +++ b/internal/semantic/version-pypi.go @@ -184,16 +184,6 @@ func (pv PyPIVersion) compareRelease(pw PyPIVersion) int { return pv.release.Cmp(pw.release) } -func (pv PyPIVersion) preIndex() int { - 
for i, pre := range []string{"a", "b", "rc"} { - if pre == pv.pre.letter { - return i - } - } - - panic("unknown prefix " + pv.pre.letter) -} - // Checks if this PyPIVersion should apply a sort trick when comparing pre, // which ensures that i.e. 1.0.dev0 is before 1.0a0. func (pv PyPIVersion) shouldApplyPreTrick() bool { @@ -222,12 +212,8 @@ func (pv PyPIVersion) comparePre(pw PyPIVersion) int { case pw.pre.number == nil: return -1 default: - ai := pv.preIndex() - bi := pw.preIndex() - - if ai == bi { - return pv.pre.number.Cmp(pw.pre.number) - } + ai := pv.pre.letter[0] + bi := pw.pre.letter[0] if ai > bi { return +1 @@ -236,7 +222,7 @@ func (pv PyPIVersion) comparePre(pw PyPIVersion) int { return -1 } - return 0 + return pv.pre.number.Cmp(pw.pre.number) } } diff --git a/internal/semantic/version-rubygems.go b/internal/semantic/version-rubygems.go index d289fe5efd..2904d21193 100644 --- a/internal/semantic/version-rubygems.go +++ b/internal/semantic/version-rubygems.go @@ -1,7 +1,6 @@ package semantic import ( - "strconv" "strings" ) @@ -75,8 +74,6 @@ func canonicalSegments(segs []string) (canSegs []string) { func compareRubyGemsComponents(a, b []string) int { numberOfComponents := max(len(a), len(b)) - var compare int - for i := range numberOfComponents { as := fetch(a, i, "0") bs := fetch(b, i, "0") @@ -86,42 +83,18 @@ func compareRubyGemsComponents(a, b []string) int { switch { case aIsNumber && bIsNumber: - compare = ai.Cmp(bi) + if diff := ai.Cmp(bi); diff != 0 { + return diff + } case !aIsNumber && !bIsNumber: - compare = strings.Compare(as, bs) + if diff := strings.Compare(as, bs); diff != 0 { + return diff + } case aIsNumber: - compare = +1 + return +1 default: - compare = -1 - } - - if compare != 0 { - if compare > 0 { - return 1 - } - - return -1 - } - } - - if len(a) > len(b) { - next := a[len(b)] - - if _, err := strconv.Atoi(next); err == nil { - return 1 - } - - return -1 - } - - if len(a) < len(b) { - next := b[len(a)] - - if _, err := strconv.Atoi(next); err == nil { return -1 } - - return +1 } return 0 diff --git a/internal/semantic/version-semver-like.go b/internal/semantic/version-semver-like.go index fbb63bc689..3ccfe91846 100644 --- a/internal/semantic/version-semver-like.go +++ b/internal/semantic/version-semver-like.go @@ -38,10 +38,6 @@ func (v *SemverLikeVersion) fetchComponentsAndBuild(maxComponents int) (Componen func ParseSemverLikeVersion(line string, maxComponents int) SemverLikeVersion { v := parseSemverLike(line) - if maxComponents == -1 { - return v - } - components, build := v.fetchComponentsAndBuild(maxComponents) return SemverLikeVersion{ @@ -60,7 +56,6 @@ func parseSemverLike(line string) SemverLikeVersion { currentCom := "" foundBuild := false - emptyComponent := false leadingV := strings.HasPrefix(line, "v") line = strings.TrimPrefix(line, "v") @@ -94,15 +89,11 @@ func parseSemverLike(line string) SemverLikeVersion { components = append(components, v) currentCom = "" - - emptyComponent = false } // a component terminator means there might be another component // afterwards, so don't start parsing the build string just yet if c == '.' { - emptyComponent = true - continue } @@ -118,19 +109,6 @@ func parseSemverLike(line string) SemverLikeVersion { components = append(components, v) currentCom = "" - emptyComponent = false - } - - // if we ended with an empty component section, - // prefix the build string with a '.' - if emptyComponent { - currentCom = "." 
+ currentCom - } - - // if we found no components, then the v wasn't actually leading - if len(components) == 0 && leadingV { - leadingV = false - currentCom = "v" + currentCom } return SemverLikeVersion{ diff --git a/internal/version/version.go b/internal/version/version.go index 23920c77e5..995b31d6ea 100644 --- a/internal/version/version.go +++ b/internal/version/version.go @@ -1,4 +1,4 @@ package version // OSVVersion is the current release version, you should update this variable when doing a release -var OSVVersion = "1.9.0" +var OSVVersion = "1.9.1" diff --git a/pkg/config/config.go b/pkg/config/config.go deleted file mode 100644 index 3e0058276a..0000000000 --- a/pkg/config/config.go +++ /dev/null @@ -1,270 +0,0 @@ -// Deprecated: this is now private and should not be used outside the scanner -package config - -import ( - "errors" - "fmt" - "os" - "path/filepath" - "slices" - "strings" - "time" - - "github.com/BurntSushi/toml" - "github.com/google/osv-scanner/pkg/models" - "github.com/google/osv-scanner/pkg/reporter" -) - -const osvScannerConfigName = "osv-scanner.toml" - -// Ignore stuttering as that would be a breaking change -// TODO: V2 rename? -// -// Deprecated: this is now private and should not be used outside the scanner -// -//nolint:revive -type ConfigManager struct { - // Override to replace all other configs - OverrideConfig *Config - // Config to use if no config file is found alongside manifests - DefaultConfig Config - // Cache to store loaded configs - ConfigMap map[string]Config -} - -// Deprecated: this is now private and should not be used outside the scanner -type Config struct { - IgnoredVulns []IgnoreEntry `toml:"IgnoredVulns"` - PackageOverrides []PackageOverrideEntry `toml:"PackageOverrides"` - GoVersionOverride string `toml:"GoVersionOverride"` - // The path to config file that this config was loaded from, - // set by the scanner after having successfully parsed the file - LoadPath string `toml:"-"` -} - -// Deprecated: this is now private and should not be used outside the scanner -type IgnoreEntry struct { - ID string `toml:"id"` - IgnoreUntil time.Time `toml:"ignoreUntil"` - Reason string `toml:"reason"` -} - -// Deprecated: this is now private and should not be used outside the scanner -type PackageOverrideEntry struct { - Name string `toml:"name"` - // If the version is empty, the entry applies to all versions. 
- Version string `toml:"version"` - Ecosystem string `toml:"ecosystem"` - Group string `toml:"group"` - Ignore bool `toml:"ignore"` - Vulnerability Vulnerability `toml:"vulnerability"` - License License `toml:"license"` - EffectiveUntil time.Time `toml:"effectiveUntil"` - Reason string `toml:"reason"` -} - -func (e PackageOverrideEntry) matches(pkg models.PackageVulns) bool { - if e.Name != "" && e.Name != pkg.Package.Name { - return false - } - if e.Version != "" && e.Version != pkg.Package.Version { - return false - } - if e.Ecosystem != "" && e.Ecosystem != pkg.Package.Ecosystem { - return false - } - if e.Group != "" && !slices.Contains(pkg.DepGroups, e.Group) { - return false - } - - return true -} - -// Deprecated: this is now private and should not be used outside the scanner -type Vulnerability struct { - Ignore bool `toml:"ignore"` -} - -// Deprecated: this is now private and should not be used outside the scanner -type License struct { - Override []string `toml:"override"` - Ignore bool `toml:"ignore"` -} - -// Deprecated: this is now private and should not be used outside the scanner -func (c *Config) ShouldIgnore(vulnID string) (bool, IgnoreEntry) { - index := slices.IndexFunc(c.IgnoredVulns, func(e IgnoreEntry) bool { return e.ID == vulnID }) - if index == -1 { - return false, IgnoreEntry{} - } - ignoredLine := c.IgnoredVulns[index] - - return shouldIgnoreTimestamp(ignoredLine.IgnoreUntil), ignoredLine -} - -func (c *Config) filterPackageVersionEntries(pkg models.PackageVulns, condition func(PackageOverrideEntry) bool) (bool, PackageOverrideEntry) { - index := slices.IndexFunc(c.PackageOverrides, func(e PackageOverrideEntry) bool { - return e.matches(pkg) && condition(e) - }) - if index == -1 { - return false, PackageOverrideEntry{} - } - ignoredLine := c.PackageOverrides[index] - - return shouldIgnoreTimestamp(ignoredLine.EffectiveUntil), ignoredLine -} - -// ShouldIgnorePackage determines if the given package should be ignored based on override entries in the config -// -// Deprecated: this is now private and should not be used outside the scanner -func (c *Config) ShouldIgnorePackage(pkg models.PackageVulns) (bool, PackageOverrideEntry) { - return c.filterPackageVersionEntries(pkg, func(e PackageOverrideEntry) bool { - return e.Ignore - }) -} - -// Deprecated: Use ShouldIgnorePackage instead -func (c *Config) ShouldIgnorePackageVersion(name, version, ecosystem string) (bool, PackageOverrideEntry) { - return c.ShouldIgnorePackage(models.PackageVulns{ - Package: models.PackageInfo{ - Name: name, - Version: version, - Ecosystem: ecosystem, - }, - }) -} - -// ShouldIgnorePackageVulnerabilities determines if the given package should have its vulnerabilities ignored based on override entries in the config -// -// Deprecated: this is now private and should not be used outside the scanner -func (c *Config) ShouldIgnorePackageVulnerabilities(pkg models.PackageVulns) bool { - overrides, _ := c.filterPackageVersionEntries(pkg, func(e PackageOverrideEntry) bool { - return e.Vulnerability.Ignore - }) - - return overrides -} - -// ShouldOverridePackageLicense determines if the given package should have its license ignored or changed based on override entries in the config -// -// Deprecated: this is now private and should not be used outside the scanner -func (c *Config) ShouldOverridePackageLicense(pkg models.PackageVulns) (bool, PackageOverrideEntry) { - return c.filterPackageVersionEntries(pkg, func(e PackageOverrideEntry) bool { - return e.License.Ignore || len(e.License.Override) > 
0 - }) -} - -// Deprecated: Use ShouldOverridePackageLicense instead -func (c *Config) ShouldOverridePackageVersionLicense(name, version, ecosystem string) (bool, PackageOverrideEntry) { - return c.ShouldOverridePackageLicense(models.PackageVulns{ - Package: models.PackageInfo{ - Name: name, - Version: version, - Ecosystem: ecosystem, - }, - }) -} - -func shouldIgnoreTimestamp(ignoreUntil time.Time) bool { - if ignoreUntil.IsZero() { - // If IgnoreUntil is not set, should ignore. - return true - } - // Should ignore if IgnoreUntil is still after current time - // Takes timezone offsets into account if it is specified. otherwise it's using local time - return ignoreUntil.After(time.Now()) -} - -// Sets the override config by reading the config file at configPath. -// Will return an error if loading the config file fails -// -// Deprecated: this is now private and should not be used outside the scanner -func (c *ConfigManager) UseOverride(configPath string) error { - config, configErr := tryLoadConfig(configPath) - if configErr != nil { - return configErr - } - c.OverrideConfig = &config - - return nil -} - -// Attempts to get the config -// -// Deprecated: this is now private and should not be used outside the scanner -func (c *ConfigManager) Get(r reporter.Reporter, targetPath string) Config { - if c.OverrideConfig != nil { - return *c.OverrideConfig - } - - configPath, err := normalizeConfigLoadPath(targetPath) - if err != nil { - // TODO: This can happen when target is not a file (e.g. Docker container, git hash...etc.) - // Figure out a more robust way to load config from non files - // r.PrintErrorf("Can't find config path: %s\n", err) - return Config{} - } - - config, alreadyExists := c.ConfigMap[configPath] - if alreadyExists { - return config - } - - config, configErr := tryLoadConfig(configPath) - if configErr == nil { - r.Infof("Loaded filter from: %s\n", config.LoadPath) - } else { - // anything other than the config file not existing is most likely due to an invalid config file - if !errors.Is(configErr, os.ErrNotExist) { - r.Errorf("Ignored invalid config file at: %s\n", configPath) - r.Verbosef("Config file %s is invalid because: %v\n", configPath, configErr) - } - // If config doesn't exist, use the default config - config = c.DefaultConfig - } - c.ConfigMap[configPath] = config - - return config -} - -// Finds the containing folder of `target`, then appends osvScannerConfigName -func normalizeConfigLoadPath(target string) (string, error) { - stat, err := os.Stat(target) - if err != nil { - return "", fmt.Errorf("failed to stat target: %w", err) - } - - var containingFolder string - if !stat.IsDir() { - containingFolder = filepath.Dir(target) - } else { - containingFolder = target - } - configPath := filepath.Join(containingFolder, osvScannerConfigName) - - return configPath, nil -} - -// tryLoadConfig attempts to parse the config file at the given path as TOML, -// returning the Config object if successful or otherwise the error -func tryLoadConfig(configPath string) (Config, error) { - config := Config{} - m, err := toml.DecodeFile(configPath, &config) - if err == nil { - unknownKeys := m.Undecoded() - - if len(unknownKeys) > 0 { - keys := make([]string, 0, len(unknownKeys)) - - for _, key := range unknownKeys { - keys = append(keys, key.String()) - } - - return Config{}, fmt.Errorf("unknown keys in config file: %s", strings.Join(keys, ", ")) - } - - config.LoadPath = configPath - } - - return config, err -} diff --git a/pkg/config/config_internal_test.go 
b/pkg/config/config_internal_test.go deleted file mode 100644 index 2336c2ae23..0000000000 --- a/pkg/config/config_internal_test.go +++ /dev/null @@ -1,1320 +0,0 @@ -package config - -import ( - "fmt" - "reflect" - "strings" - "testing" - "time" - - "github.com/google/go-cmp/cmp" - "github.com/google/osv-scanner/pkg/models" -) - -// Attempts to normalize any file paths in the given `output` so that they can -// be compared reliably regardless of the file path separator being used. -// -// Namely, escaped forward slashes are replaced with backslashes. -func normalizeFilePaths(t *testing.T, output string) string { - t.Helper() - - return strings.ReplaceAll(strings.ReplaceAll(output, "\\\\", "/"), "\\", "/") -} - -func Test_normalizeConfigLoadPath(t *testing.T) { - t.Parallel() - - type args struct { - target string - } - tests := []struct { - name string - args args - want string - wantErr bool - }{ - { - name: "target does not exist", - args: args{ - target: "./fixtures/testdatainner/does-not-exist", - }, - want: "", - wantErr: true, - }, - { - name: "target is file in directory", - args: args{ - target: "./fixtures/testdatainner/innerFolder/test.yaml", - }, - want: "fixtures/testdatainner/innerFolder/osv-scanner.toml", - wantErr: false, - }, - { - name: "target is inner directory with trailing slash", - args: args{ - target: "./fixtures/testdatainner/innerFolder/", - }, - want: "fixtures/testdatainner/innerFolder/osv-scanner.toml", - wantErr: false, - }, - { - name: "target is inner directory without trailing slash", - args: args{ - target: "./fixtures/testdatainner/innerFolder", - }, - want: "fixtures/testdatainner/innerFolder/osv-scanner.toml", - wantErr: false, - }, - { - name: "target is directory with trailing slash", - args: args{ - target: "./fixtures/testdatainner/", - }, - want: "fixtures/testdatainner/osv-scanner.toml", - wantErr: false, - }, - { - name: "target is file in directory", - args: args{ - target: "./fixtures/testdatainner/some-manifest.yaml", - }, - want: "fixtures/testdatainner/osv-scanner.toml", - wantErr: false, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - got, err := normalizeConfigLoadPath(tt.args.target) - if (err != nil) != tt.wantErr { - t.Errorf("normalizeConfigLoadPath() error = %v, wantErr %v", err, tt.wantErr) - return - } - - got = normalizeFilePaths(t, got) - if got != tt.want { - t.Errorf("normalizeConfigLoadPath() got = %v, want %v", got, tt.want) - } - }) - } -} - -func Test_tryLoadConfig(t *testing.T) { - t.Parallel() - - type args struct { - configPath string - } - tests := []struct { - name string - args args - want Config - wantErr bool - }{ - { - name: "config does not exist", - args: args{ - configPath: "./fixtures/testdatainner/does-not-exist", - }, - want: Config{}, - wantErr: true, - }, - { - name: "config has some ignored vulnerabilities and package overrides", - args: args{ - configPath: "./fixtures/testdatainner/osv-scanner.toml", - }, - want: Config{ - LoadPath: "./fixtures/testdatainner/osv-scanner.toml", - IgnoredVulns: []IgnoreEntry{ - { - ID: "GO-2022-0968", - }, - { - ID: "GO-2022-1059", - }, - }, - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib", - Version: "1.0.0", - Ecosystem: "Go", - Ignore: true, - Reason: "abc", - }, - { - Name: "my-pkg", - Version: "1.0.0", - Ecosystem: "Go", - Reason: "abc", - Ignore: true, - License: License{ - Override: []string{"MIT", "0BSD"}, - }, - }, - }, - }, - wantErr: false, - }, - { - name: "load path cannot be overridden via config", - 
args: args{ - configPath: "./fixtures/testdatainner/osv-scanner-load-path.toml", - }, - want: Config{ - LoadPath: "", - }, - wantErr: true, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - got, err := tryLoadConfig(tt.args.configPath) - if (err != nil) != tt.wantErr { - t.Errorf("tryLoadConfig() error = %v, wantErr %v", err, tt.wantErr) - return - } - if diff := cmp.Diff(tt.want, got); diff != "" { - t.Errorf("tryLoadConfig() mismatch (-want +got):\n%s", diff) - } - }) - } -} - -func TestTryLoadConfig_UnknownKeys(t *testing.T) { - t.Parallel() - - tests := []struct { - configPath string - unknownMsg string - }{ - { - configPath: "./fixtures/unknown-key-1.toml", - unknownMsg: "IgnoredVulns.ignoreUntilTime", - }, - { - configPath: "./fixtures/unknown-key-2.toml", - unknownMsg: "IgnoredVulns.ignoreUntiI", - }, - { - configPath: "./fixtures/unknown-key-3.toml", - unknownMsg: "IgnoredVulns.reasoning", - }, - { - configPath: "./fixtures/unknown-key-4.toml", - unknownMsg: "PackageOverrides.skip", - }, - { - configPath: "./fixtures/unknown-key-5.toml", - unknownMsg: "PackageOverrides.license.skip", - }, - { - configPath: "./fixtures/unknown-key-6.toml", - unknownMsg: "RustVersionOverride", - }, - { - configPath: "./fixtures/unknown-key-7.toml", - unknownMsg: "RustVersionOverride, PackageOverrides.skip", - }, - } - - for _, testData := range tests { - c, err := tryLoadConfig(testData.configPath) - - // we should always be returning an empty config on error - if diff := cmp.Diff(Config{}, c); diff != "" { - t.Errorf("tryLoadConfig() mismatch (-want +got):\n%s", diff) - } - if err == nil { - t.Fatal("tryLoadConfig() did not return an error") - } - - wantMsg := fmt.Sprintf("unknown keys in config file: %v", testData.unknownMsg) - - if err.Error() != wantMsg { - t.Errorf("tryLoadConfig() error = '%v', want '%s'", err, wantMsg) - } - } -} - -func TestConfig_ShouldIgnore(t *testing.T) { - t.Parallel() - - type args struct { - vulnID string - } - tests := []struct { - name string - config Config - args args - wantOk bool - wantEntry IgnoreEntry - }{ - // entry exists - { - name: "", - config: Config{ - IgnoredVulns: []IgnoreEntry{ - { - ID: "GHSA-123", - IgnoreUntil: time.Time{}, - Reason: "", - }, - }, - }, - args: args{ - vulnID: "GHSA-123", - }, - wantOk: true, - wantEntry: IgnoreEntry{ - ID: "GHSA-123", - IgnoreUntil: time.Time{}, - Reason: "", - }, - }, - // entry does not exist - { - name: "", - config: Config{ - IgnoredVulns: []IgnoreEntry{ - { - ID: "GHSA-123", - IgnoreUntil: time.Time{}, - Reason: "", - }, - }, - }, - args: args{ - vulnID: "nonexistent", - }, - wantOk: false, - wantEntry: IgnoreEntry{}, - }, - // ignored until a time in the past - { - name: "", - config: Config{ - IgnoredVulns: []IgnoreEntry{ - { - ID: "GHSA-123", - IgnoreUntil: time.Now().Add(-time.Hour).Round(time.Second), - Reason: "", - }, - }, - }, - args: args{ - vulnID: "GHSA-123", - }, - wantOk: false, - wantEntry: IgnoreEntry{ - ID: "GHSA-123", - IgnoreUntil: time.Now().Add(-time.Hour).Round(time.Second), - Reason: "", - }, - }, - // ignored until a time in the future - { - name: "", - config: Config{ - IgnoredVulns: []IgnoreEntry{ - { - ID: "GHSA-123", - IgnoreUntil: time.Now().Add(time.Hour).Round(time.Second), - Reason: "", - }, - }, - }, - args: args{ - vulnID: "GHSA-123", - }, - wantOk: true, - wantEntry: IgnoreEntry{ - ID: "GHSA-123", - IgnoreUntil: time.Now().Add(time.Hour).Round(time.Second), - Reason: "", - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, 
func(t *testing.T) { - t.Parallel() - - gotOk, gotEntry := tt.config.ShouldIgnore(tt.args.vulnID) - if gotOk != tt.wantOk { - t.Errorf("ShouldIgnore() gotOk = %v, wantOk %v", gotOk, tt.wantOk) - } - if !reflect.DeepEqual(gotEntry, tt.wantEntry) { - t.Errorf("ShouldIgnore() gotEntry = %v, wantEntry %v", gotEntry, tt.wantEntry) - } - }) - } -} - -func TestConfig_ShouldIgnorePackage(t *testing.T) { - t.Parallel() - - tests := []struct { - name string - config Config - args models.PackageVulns - wantOk bool - wantEntry PackageOverrideEntry - }{ - { - name: "Everything-level entry exists", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - }, - DepGroups: []string{"dev"}, - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - // ------------------------------------------------------------------------- - { - name: "Ecosystem-level entry exists and does match", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Ecosystem: "Go", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - }, - DepGroups: []string{"dev"}, - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Ecosystem: "Go", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - { - name: "Ecosystem-level entry exists and does not match", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Ecosystem: "Go", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib2", - Version: "1.0.0", - Ecosystem: "npm", - }, - DepGroups: []string{"dev"}, - }, - wantOk: false, - wantEntry: PackageOverrideEntry{}, - }, - // ------------------------------------------------------------------------- - { - name: "Group-level entry exists and does match", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Group: "dev", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - }, - DepGroups: []string{"dev"}, - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Group: "dev", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - { - name: "Group-level entry exists and does not match", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Group: "dev", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib2", - Version: "1.0.0", - Ecosystem: "npm", - }, - DepGroups: []string{"optional"}, - }, - wantOk: false, - wantEntry: PackageOverrideEntry{}, - }, - { - name: "Group-level entry exists and does not match when empty", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Group: "dev", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib2", - Version: "1.0.0", - Ecosystem: "npm", - }, - }, - wantOk: false, - wantEntry: PackageOverrideEntry{}, - }, - // 
------------------------------------------------------------------------- - { - name: "Version-level entry exists and does match", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Version: "1.0.0", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - }, - DepGroups: []string{"dev"}, - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Version: "1.0.0", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - { - name: "Version-level entry exists and does not match", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Version: "1.0.0", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.1", - Ecosystem: "Go", - }, - DepGroups: []string{"dev"}, - }, - wantOk: false, - wantEntry: PackageOverrideEntry{}, - }, - // ------------------------------------------------------------------------- - { - name: "Name-level entry exists and does match", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - }, - DepGroups: []string{"dev"}, - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Name: "lib1", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - { - name: "Name-level entry exists and does not match", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib2", - Version: "1.0.0", - Ecosystem: "npm", - }, - DepGroups: []string{"dev"}, - }, - wantOk: false, - wantEntry: PackageOverrideEntry{}, - }, - // ------------------------------------------------------------------------- - { - name: "Name, Version, and Ecosystem entry exists", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - }, - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - { - name: "Name and Ecosystem entry exists", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Ecosystem: "Go", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - }, - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Name: "lib1", - Ecosystem: "Go", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - { - name: "Name, Ecosystem, and Group entry exists and matches", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Ecosystem: "Go", - Group: "dev", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - 
Version: "1.0.0", - Ecosystem: "Go", - }, - DepGroups: []string{"dev"}, - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Name: "lib1", - Ecosystem: "Go", - Group: "dev", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - { - name: "Name, Ecosystem, and Group entry exists but does not match", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Ecosystem: "Go", - Group: "dev", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - }, - DepGroups: []string{"prod"}, - }, - wantOk: false, - wantEntry: PackageOverrideEntry{}, - }, - { - name: "Entry doesn't exist", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Version: "2.0.0", - Ecosystem: "Go", - Ignore: false, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - { - Name: "lib2", - Version: "2.0.0", - Ignore: true, - Ecosystem: "Go", - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "2.0.0", - Ecosystem: "Go", - }, - }, - wantOk: false, - wantEntry: PackageOverrideEntry{}, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - gotOk, gotEntry := tt.config.ShouldIgnorePackage(tt.args) - if gotOk != tt.wantOk { - t.Errorf("ShouldIgnorePackage() gotOk = %v, wantOk %v", gotOk, tt.wantOk) - } - if !reflect.DeepEqual(gotEntry, tt.wantEntry) { - t.Errorf("ShouldIgnorePackage() gotEntry = %v, wantEntry %v", gotEntry, tt.wantEntry) - } - }) - } -} - -func TestConfig_ShouldIgnorePackageVersion(t *testing.T) { - t.Parallel() - - type args struct { - name string - version string - ecosystem string - } - tests := []struct { - name string - config Config - args args - wantOk bool - wantEntry PackageOverrideEntry - }{ - { - name: "Version-level entry exists", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: args{ - name: "lib1", - version: "1.0.0", - ecosystem: "Go", - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - { - name: "Package-level entry exists", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Ecosystem: "Go", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: args{ - name: "lib1", - version: "1.0.0", - ecosystem: "Go", - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Name: "lib1", - Ecosystem: "Go", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - { - name: "Entry doesn't exist", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Version: "2.0.0", - Ecosystem: "Go", - Ignore: false, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - { - Name: "lib2", - Version: "2.0.0", - Ignore: true, - Ecosystem: "Go", - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: args{ - name: "lib1", - version: "2.0.0", - ecosystem: "Go", - }, - wantOk: false, - wantEntry: PackageOverrideEntry{}, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - gotOk, gotEntry := 
tt.config.ShouldIgnorePackageVersion(tt.args.name, tt.args.version, tt.args.ecosystem) - if gotOk != tt.wantOk { - t.Errorf("ShouldIgnorePackageVersion() gotOk = %v, wantOk %v", gotOk, tt.wantOk) - } - if !reflect.DeepEqual(gotEntry, tt.wantEntry) { - t.Errorf("ShouldIgnorePackageVersion() gotEntry = %v, wantEntry %v", gotEntry, tt.wantEntry) - } - }) - } -} - -func TestConfig_ShouldIgnorePackageVulnerabilities(t *testing.T) { - t.Parallel() - - tests := []struct { - name string - config Config - args models.PackageVulns - wantOk bool - }{ - { - name: "Exact version entry exists with ignore", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - Vulnerability: Vulnerability{ - Ignore: true, - }, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - }, - }, - wantOk: true, - }, - { - name: "Version entry doesn't exist with ignore", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - Vulnerability: Vulnerability{ - Ignore: true, - }, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.1", - Ecosystem: "Go", - }, - }, - wantOk: false, - }, - { - name: "Name matches with ignore", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Ecosystem: "Go", - Vulnerability: Vulnerability{ - Ignore: true, - }, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.1", - Ecosystem: "Go", - }, - }, - wantOk: true, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - gotOk := tt.config.ShouldIgnorePackageVulnerabilities(tt.args) - if gotOk != tt.wantOk { - t.Errorf("ShouldIgnorePackageVulnerabilities() gotOk = %v, wantOk %v", gotOk, tt.wantOk) - } - }) - } -} - -func TestConfig_ShouldOverridePackageLicense(t *testing.T) { - t.Parallel() - - tests := []struct { - name string - config Config - args models.PackageVulns - wantOk bool - wantEntry PackageOverrideEntry - }{ - { - name: "Exact version entry exists with override", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - License: License{ - Override: []string{"mit"}, - }, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - }, - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - License: License{ - Override: []string{"mit"}, - }, - Reason: "abc", - }, - }, - { - name: "Exact version entry exists with ignore", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - License: License{ - Ignore: true, - }, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - }, - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - License: License{ - Ignore: true, - }, - Reason: "abc", - }, - }, - { - name: "Version entry doesn't exist with override", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - License: License{ - 
Override: []string{"mit"}, - }, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.1", - Ecosystem: "Go", - }, - }, - wantOk: false, - wantEntry: PackageOverrideEntry{}, - }, - { - name: "Version entry doesn't exist with ignore", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - License: License{ - Ignore: true, - }, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.1", - Ecosystem: "Go", - }, - }, - wantOk: false, - wantEntry: PackageOverrideEntry{}, - }, - { - name: "Name matches with override", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Ecosystem: "Go", - License: License{ - Override: []string{"mit"}, - }, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.1", - Ecosystem: "Go", - }, - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Name: "lib1", - Ecosystem: "Go", - License: License{ - Override: []string{"mit"}, - }, - Reason: "abc", - }, - }, - { - name: "Name matches with ignore", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Ecosystem: "Go", - License: License{ - Ignore: true, - }, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.1", - Ecosystem: "Go", - }, - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Name: "lib1", - Ecosystem: "Go", - License: License{ - Ignore: true, - }, - Reason: "abc", - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - gotOk, gotEntry := tt.config.ShouldOverridePackageLicense(tt.args) - if gotOk != tt.wantOk { - t.Errorf("ShouldOverridePackageLicense() gotOk = %v, wantOk %v", gotOk, tt.wantOk) - } - if !reflect.DeepEqual(gotEntry, tt.wantEntry) { - t.Errorf("ShouldOverridePackageLicense() gotEntry = %v, wantEntry %v", gotEntry, tt.wantEntry) - } - }) - } -} - -func TestConfig_ShouldOverridePackageVersionLicense(t *testing.T) { - t.Parallel() - - type args struct { - name string - version string - ecosystem string - } - tests := []struct { - name string - config Config - args args - wantOk bool - wantEntry PackageOverrideEntry - }{ - { - name: "Exact version entry exists", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - License: License{ - Override: []string{"mit"}, - }, - Reason: "abc", - }, - }, - }, - args: args{ - name: "lib1", - version: "1.0.0", - ecosystem: "Go", - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - License: License{ - Override: []string{"mit"}, - }, - Reason: "abc", - }, - }, - { - name: "Version entry doesn't exist", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - License: License{ - Override: []string{"mit"}, - }, - Reason: "abc", - }, - }, - }, - args: args{ - name: "lib1", - version: "1.0.1", - ecosystem: "Go", - }, - wantOk: false, - wantEntry: PackageOverrideEntry{}, - }, - { - name: "Name matches", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Ecosystem: "Go", - License: License{ - Override: []string{"mit"}, - }, - Reason: "abc", - }, - }, - }, - args: args{ - name: "lib1", - 
version: "1.0.1", - ecosystem: "Go", - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Name: "lib1", - Ecosystem: "Go", - License: License{ - Override: []string{"mit"}, - }, - Reason: "abc", - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - gotOk, gotEntry := tt.config.ShouldOverridePackageVersionLicense(tt.args.name, tt.args.version, tt.args.ecosystem) - if gotOk != tt.wantOk { - t.Errorf("ShouldOverridePackageVersionLicense() gotOk = %v, wantOk %v", gotOk, tt.wantOk) - } - if !reflect.DeepEqual(gotEntry, tt.wantEntry) { - t.Errorf("ShouldOverridePackageVersionLicense() gotEntry = %v, wantEntry %v", gotEntry, tt.wantEntry) - } - }) - } -} diff --git a/pkg/config/fixtures/testdatainner/innerFolder/test.yaml b/pkg/config/fixtures/testdatainner/innerFolder/test.yaml deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/pkg/config/fixtures/testdatainner/osv-scanner-load-path.toml b/pkg/config/fixtures/testdatainner/osv-scanner-load-path.toml deleted file mode 100644 index 001548b76d..0000000000 --- a/pkg/config/fixtures/testdatainner/osv-scanner-load-path.toml +++ /dev/null @@ -1 +0,0 @@ -LoadPath = "a/b/c" diff --git a/pkg/config/fixtures/testdatainner/osv-scanner.toml b/pkg/config/fixtures/testdatainner/osv-scanner.toml deleted file mode 100644 index f9be2c0f2e..0000000000 --- a/pkg/config/fixtures/testdatainner/osv-scanner.toml +++ /dev/null @@ -1,25 +0,0 @@ -[[IgnoredVulns]] -id = "GO-2022-0968" -# ignoreUntil = 2022-11-09 -# reason = "" # Optional reason - -[[IgnoredVulns]] -id = "GO-2022-1059" -# ignoreUntil = 2022-11-09 # Optional exception expiry date -# reason = "" # Optional reason - -[[PackageOverrides]] -name = "lib" -version = "1.0.0" -ecosystem = "Go" -ignore = true -# effectiveUntil = 2022-11-09 # Optional exception expiry date -reason = "abc" - -[[PackageOverrides]] -name = "my-pkg" -version = "1.0.0" -ecosystem = "Go" -ignore = true -reason = "abc" -license.override = ["MIT", "0BSD"] diff --git a/pkg/config/fixtures/testdatainner/some-manifest.yaml b/pkg/config/fixtures/testdatainner/some-manifest.yaml deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/pkg/config/fixtures/unknown-key-1.toml b/pkg/config/fixtures/unknown-key-1.toml deleted file mode 100644 index 2c8538325b..0000000000 --- a/pkg/config/fixtures/unknown-key-1.toml +++ /dev/null @@ -1,4 +0,0 @@ -[[IgnoredVulns]] -id = "GHSA-jgvc-jfgh-rjvv" -ignoreUntilTime = 2024-08-02 # whoops, should be "ignoreUntil" -reason = "..." diff --git a/pkg/config/fixtures/unknown-key-2.toml b/pkg/config/fixtures/unknown-key-2.toml deleted file mode 100644 index 7b6d964f43..0000000000 --- a/pkg/config/fixtures/unknown-key-2.toml +++ /dev/null @@ -1,4 +0,0 @@ -[[IgnoredVulns]] -id = "GHSA-jgvc-jfgh-rjvv" -ignoreUntiI = 2024-08-02 # whoops, should be "ignoreUntil" -reason = "..." diff --git a/pkg/config/fixtures/unknown-key-3.toml b/pkg/config/fixtures/unknown-key-3.toml deleted file mode 100644 index bce7ed9a19..0000000000 --- a/pkg/config/fixtures/unknown-key-3.toml +++ /dev/null @@ -1,4 +0,0 @@ -[[IgnoredVulns]] -id = "GHSA-jgvc-jfgh-rjvv" -ignoreUntil = 2024-08-02 -reasoning = "..." 
# whoops, should be "reason" diff --git a/pkg/config/fixtures/unknown-key-4.toml b/pkg/config/fixtures/unknown-key-4.toml deleted file mode 100644 index f508c89dd1..0000000000 --- a/pkg/config/fixtures/unknown-key-4.toml +++ /dev/null @@ -1,4 +0,0 @@ -[[PackageOverrides]] -ecosystem = "npm" -skip = true # whoops, should be "ignore" -license.override = ["0BSD"] diff --git a/pkg/config/fixtures/unknown-key-5.toml b/pkg/config/fixtures/unknown-key-5.toml deleted file mode 100644 index d1d832aed0..0000000000 --- a/pkg/config/fixtures/unknown-key-5.toml +++ /dev/null @@ -1,3 +0,0 @@ -[[PackageOverrides]] -ecosystem = "npm" -license.skip = false # whoops, should be "license.ignore" diff --git a/pkg/config/fixtures/unknown-key-6.toml b/pkg/config/fixtures/unknown-key-6.toml deleted file mode 100644 index 80f0b87eee..0000000000 --- a/pkg/config/fixtures/unknown-key-6.toml +++ /dev/null @@ -1 +0,0 @@ -RustVersionOverride = "1.2.3" # whoops, not supported diff --git a/pkg/config/fixtures/unknown-key-7.toml b/pkg/config/fixtures/unknown-key-7.toml deleted file mode 100644 index 044156ccec..0000000000 --- a/pkg/config/fixtures/unknown-key-7.toml +++ /dev/null @@ -1,5 +0,0 @@ -RustVersionOverride = "1.2.3" # whoops, not supported - -[[PackageOverrides]] -ecosystem = "npm" -skip = true # whoops, should be "ignore" diff --git a/pkg/depsdev/license.go b/pkg/depsdev/license.go deleted file mode 100644 index 67fc3398e8..0000000000 --- a/pkg/depsdev/license.go +++ /dev/null @@ -1,125 +0,0 @@ -// Deprecated: this is now private and should not be used outside the scanner -package depsdev - -import ( - "context" - "crypto/x509" - "fmt" - - "github.com/google/osv-scanner/pkg/lockfile" - "github.com/google/osv-scanner/pkg/models" - "github.com/google/osv-scanner/pkg/osv" - - depsdevpb "deps.dev/api/v3" - "golang.org/x/sync/errgroup" - "google.golang.org/grpc" - "google.golang.org/grpc/codes" - "google.golang.org/grpc/credentials" - "google.golang.org/grpc/status" -) - -// DepsdevAPI is the URL to the deps.dev API. It is documented at -// docs.deps.dev/api. -// -// Deprecated: this is now private and should not be used outside the scanner -const DepsdevAPI = "api.deps.dev:443" - -// System maps from a lockfile system to the depsdev API system. -// -// Deprecated: this is now private and should not be used outside the scanner -var System = map[lockfile.Ecosystem]depsdevpb.System{ - lockfile.NpmEcosystem: depsdevpb.System_NPM, - lockfile.NuGetEcosystem: depsdevpb.System_NUGET, - lockfile.CargoEcosystem: depsdevpb.System_CARGO, - lockfile.GoEcosystem: depsdevpb.System_GO, - lockfile.MavenEcosystem: depsdevpb.System_MAVEN, - lockfile.PipEcosystem: depsdevpb.System_PYPI, -} - -// VersionQuery constructs a GetVersion request from the arguments. -// -// Deprecated: this is now private and should not be used outside the scanner -func VersionQuery(system depsdevpb.System, name string, version string) *depsdevpb.GetVersionRequest { - if system == depsdevpb.System_GO { - version = "v" + version - } - - return &depsdevpb.GetVersionRequest{ - VersionKey: &depsdevpb.VersionKey{ - System: system, - Name: name, - Version: version, - }, - } -} - -// MakeVersionRequests wraps MakeVersionRequestsWithContext using context.Background. 
-// -// Deprecated: this is now private and should not be used outside the scanner -func MakeVersionRequests(queries []*depsdevpb.GetVersionRequest) ([][]models.License, error) { - return MakeVersionRequestsWithContext(context.Background(), queries) -} - -// MakeVersionRequestsWithContext calls the deps.dev GetVersion gRPC API endpoint for each -// query. It makes these requests concurrently, sharing the single HTTP/2 -// connection. The order in which the requests are specified should correspond -// to the order of licenses returned by this function. -// -// Deprecated: this is now private and should not be used outside the scanner -func MakeVersionRequestsWithContext(ctx context.Context, queries []*depsdevpb.GetVersionRequest) ([][]models.License, error) { - certPool, err := x509.SystemCertPool() - if err != nil { - return nil, fmt.Errorf("getting system cert pool: %w", err) - } - creds := credentials.NewClientTLSFromCert(certPool, "") - dialOpts := []grpc.DialOption{grpc.WithTransportCredentials(creds)} - - if osv.RequestUserAgent != "" { - dialOpts = append(dialOpts, grpc.WithUserAgent(osv.RequestUserAgent)) - } - - conn, err := grpc.NewClient(DepsdevAPI, dialOpts...) - if err != nil { - return nil, fmt.Errorf("dialing deps.dev gRPC API: %w", err) - } - client := depsdevpb.NewInsightsClient(conn) - - licenses := make([][]models.License, len(queries)) - g, ctx := errgroup.WithContext(ctx) - for i := range queries { - if queries[i] == nil { - // This may be a private package. - licenses[i] = []models.License{models.License("UNKNOWN")} - continue - } - g.Go(func() error { - resp, err := client.GetVersion(ctx, queries[i]) - if err != nil { - if status.Code(err) == codes.NotFound { - licenses[i] = append(licenses[i], "UNKNOWN") - return nil - } - - return err - } - ls := make([]models.License, len(resp.GetLicenses())) - for j, license := range resp.GetLicenses() { - ls[j] = models.License(license) - } - if len(ls) == 0 { - // The deps.dev API will return an - // empty slice if the license is - // unknown. - ls = []models.License{models.License("UNKNOWN")} - } - licenses[i] = ls - - return nil - }) - } - if err := g.Wait(); err != nil { - return nil, err - } - - return licenses, nil -} diff --git a/pkg/grouper/grouper.go b/pkg/grouper/grouper.go deleted file mode 100644 index c64399915a..0000000000 --- a/pkg/grouper/grouper.go +++ /dev/null @@ -1,76 +0,0 @@ -// Deprecated: this is now private and should not be used outside the scanner -package grouper - -import ( - "slices" - "sort" - - "golang.org/x/exp/maps" - - "github.com/google/osv-scanner/internal/identifiers" - "github.com/google/osv-scanner/pkg/models" -) - -func hasAliasIntersection(v1, v2 IDAliases) bool { - // Check if any aliases intersect. - for _, alias := range v1.Aliases { - if slices.Contains(v2.Aliases, alias) { - return true - } - } - // Check if either IDs are in the others' aliases. - return slices.Contains(v1.Aliases, v2.ID) || slices.Contains(v2.Aliases, v1.ID) -} - -// Group groups vulnerabilities by aliases. -// -// Deprecated: this is now private and should not be used outside the scanner -func Group(vulns []IDAliases) []models.GroupInfo { - // Mapping of `vulns` index to a group ID. A group ID is just another index in the `vulns` slice. - groups := make([]int, len(vulns)) - - // Initially make every vulnerability its own group. - for i := range vulns { - groups[i] = i - } - - // Do a pair-wise (n^2) comparison and merge all intersecting vulns. 
- for i := range vulns { - for j := i + 1; j < len(vulns); j++ { - if hasAliasIntersection(vulns[i], vulns[j]) { - // Merge the two groups. Use the smaller index as the representative ID. - groups[i] = min(groups[i], groups[j]) - groups[j] = groups[i] - } - } - } - - // Extract groups into the final result structure. - extractedGroups := map[int][]string{} - extractedAliases := map[int][]string{} - for i, gid := range groups { - extractedGroups[gid] = append(extractedGroups[gid], vulns[i].ID) - extractedAliases[gid] = append(extractedAliases[gid], vulns[i].Aliases...) - } - - // Sort by group ID to maintain stable order for tests. - sortedKeys := maps.Keys(extractedGroups) - sort.Ints(sortedKeys) - - result := make([]models.GroupInfo, 0, len(sortedKeys)) - for _, key := range sortedKeys { - // Sort the strings so they are always in the same order - slices.SortFunc(extractedGroups[key], identifiers.IDSortFunc) - - // Add IDs to aliases - extractedAliases[key] = append(extractedAliases[key], extractedGroups[key]...) - - // Dedup entries - sort.Strings(extractedAliases[key]) - extractedAliases[key] = slices.Compact(extractedAliases[key]) - - result = append(result, models.GroupInfo{IDs: extractedGroups[key], Aliases: extractedAliases[key]}) - } - - return result -} diff --git a/pkg/grouper/grouper_models.go b/pkg/grouper/grouper_models.go deleted file mode 100644 index 1b759e74e3..0000000000 --- a/pkg/grouper/grouper_models.go +++ /dev/null @@ -1,36 +0,0 @@ -// Deprecated: this is now private and should not be used outside the scanner -package grouper - -import ( - "strings" - - "github.com/google/osv-scanner/pkg/models" -) - -// Deprecated: this is now private and should not be used outside the scanner -type IDAliases struct { - ID string - Aliases []string -} - -// Deprecated: this is now private and should not be used outside the scanner -func ConvertVulnerabilityToIDAliases(c []models.Vulnerability) []IDAliases { - output := []IDAliases{} - for _, v := range c { - idAliases := IDAliases{ - ID: v.ID, - Aliases: v.Aliases, - } - - // For Debian Security Advisory data, - // all related CVEs should be bundled together, as they are part of this DSA. - // TODO(gongh@): Revisit and provide a universal way to handle all Linux distro advisories. - if strings.Split(v.ID, "-")[0] == "DSA" { - idAliases.Aliases = append(idAliases.Aliases, v.Related...) - } - - output = append(output, idAliases) - } - - return output -} diff --git a/pkg/grouper/grouper_test.go b/pkg/grouper/grouper_test.go deleted file mode 100644 index 596646f4fc..0000000000 --- a/pkg/grouper/grouper_test.go +++ /dev/null @@ -1,155 +0,0 @@ -package grouper_test - -import ( - "testing" - - "github.com/google/osv-scanner/pkg/grouper" - - "github.com/google/go-cmp/cmp" - "github.com/google/osv-scanner/pkg/models" -) - -func TestGroup(t *testing.T) { - t.Parallel() - - // Should be grouped by IDs appearing in alias. - v1 := grouper.IDAliases{ - ID: "CVE-1", - Aliases: []string{ - "FOO-1", - }, - } - v2 := grouper.IDAliases{ - ID: "FOO-1", - Aliases: []string{}, - } - v3 := grouper.IDAliases{ - ID: "FOO-2", - Aliases: []string{ - "FOO-1", - }, - } - - // Should be grouped by aliases intersecting. - v4 := grouper.IDAliases{ - ID: "BAR-1", - Aliases: []string{ - "CVE-2", - "CVE-3", - }, - } - v5 := grouper.IDAliases{ - ID: "BAR-2", - Aliases: []string{ - "CVE-3", - "CVE-4", - }, - } - v6 := grouper.IDAliases{ - ID: "BAR-3", - Aliases: []string{ - "CVE-4", - }, - } - - // Unrelated. 
- v7 := grouper.IDAliases{ - ID: "UNRELATED-1", - Aliases: []string{ - "BAR-1337", - }, - } - v8 := grouper.IDAliases{ - ID: "UNRELATED-2", - Aliases: []string{ - "BAR-1338", - }, - } - - // Unrelated, empty aliases - v9 := grouper.IDAliases{ - ID: "UNRELATED-3", - } - v10 := grouper.IDAliases{ - ID: "UNRELATED-4", - } - for _, tc := range []struct { - vulns []grouper.IDAliases - want []models.GroupInfo - }{ - { - vulns: []grouper.IDAliases{ - v1, v2, v3, v4, v5, v6, v7, v8, - }, - want: []models.GroupInfo{ - { - IDs: []string{v1.ID, v2.ID, v3.ID}, - Aliases: []string{v1.ID, v2.ID, v3.ID}, - }, - { - IDs: []string{v4.ID, v5.ID, v6.ID}, - Aliases: []string{v4.ID, v5.ID, v6.ID, v4.Aliases[0], v4.Aliases[1], v5.Aliases[1]}, - }, - { - IDs: []string{v7.ID}, - Aliases: []string{v7.Aliases[0], v7.ID}, - }, - { - IDs: []string{v8.ID}, - Aliases: []string{v8.Aliases[0], v8.ID}, - }, - }, - }, - { - vulns: []grouper.IDAliases{ - v8, v2, v1, v5, v7, v4, v6, v3, v9, v10, - }, - want: []models.GroupInfo{ - { - IDs: []string{v8.ID}, - Aliases: []string{v8.Aliases[0], v8.ID}, - }, - { - IDs: []string{v1.ID, v2.ID, v3.ID}, // Deterministic order - Aliases: []string{v1.ID, v2.ID, v3.ID}, // Deterministic order - }, - { - IDs: []string{v4.ID, v5.ID, v6.ID}, - Aliases: []string{v4.ID, v5.ID, v6.ID, v4.Aliases[0], v4.Aliases[1], v5.Aliases[1]}, - }, - { - IDs: []string{v7.ID}, - Aliases: []string{v7.Aliases[0], v7.ID}, - }, - { - IDs: []string{v9.ID}, - Aliases: []string{v9.ID}, - }, - { - IDs: []string{v10.ID}, - Aliases: []string{v10.ID}, - }, - }, - }, - { - vulns: []grouper.IDAliases{ - v9, v10, - }, - want: []models.GroupInfo{ - { - IDs: []string{v9.ID}, - Aliases: []string{v9.ID}, - }, - { - IDs: []string{v10.ID}, - Aliases: []string{v10.ID}, - }, - }, - }, - } { - grouped := grouper.Group(tc.vulns) - if diff := cmp.Diff(tc.want, grouped); diff != "" { - t.Errorf("GroupedVulns() returned an unexpected result (-want +got):\n%s", diff) - } - } -} diff --git a/pkg/lockfile/fixtures/pnpm/invalid-package-path.yaml b/pkg/lockfile/fixtures/pnpm/invalid-package-path.yaml new file mode 100644 index 0000000000..099ce241e2 --- /dev/null +++ b/pkg/lockfile/fixtures/pnpm/invalid-package-path.yaml @@ -0,0 +1,92 @@ +lockfileVersion: 5.4 + +specifiers: + '@types/jsdom': ^20.0.1 + axios: ^1.2.5 + pinia: ^2.0.28 + stream: 0.0.2 + typescript: ~4.7.4 + +dependencies: + axios: 1.2.5 + pinia: 2.0.28_e7lp6ggkpgyi5vqd44m2kxvk6i + stream: 0.0.2 + +devDependencies: + '@types/jsdom': 20.0.1 + npm-run-all: 4.1.5 + +packages: + + /@babel/helper-string-parser/7.19.4: + resolution: {integrity: sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw==} + engines: {node: '>=6.9.0'} + + /@babel/helper-validator-identifier/7.19.1: + resolution: {integrity: sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==} + engines: {node: '>=6.9.0'} + + /@babel/parser/7.20.7: + resolution: {integrity: sha512-T3Z9oHybU+0vZlY9CiDSJQTD5ZapcW18ZctFMi0MOAl/4BjFF4ul7NVSARLdbGO5vDqy9eQiGTV0LtKfvCYvcg==} + engines: {node: '>=6.0.0'} + hasBin: true + dependencies: + '@babel/types': 7.20.7 + + /@types/jsdom/20.0.1: + resolution: {integrity: sha512-d0r18sZPmMQr1eG35u12FZfhIXNrnsPU/g5wvRKCUf/tOGilKKwYMYGqh33BNR6ba+2gkHw1EUiHoN3mn7E5IQ==} + dependencies: + '@types/node': 18.11.18 + '@types/tough-cookie': 4.0.2 + parse5: 7.1.2 + dev: true + + axios@1.2.5: + resolution: {integrity: sha512-9pU/8mmjSSOb4CXVsvGIevN+MlO/t9OWtKadTaLuN85Gge3HGorUckgp8A/2FH4V4hJ7JuQ3LIeI7KAV9ITZrQ==} + 
dependencies: + follow-redirects: 1.15.2 + form-data: 4.0.0 + proxy-from-env: 1.1.0 + transitivePeerDependencies: + - debug + dev: false + + /stream/0.0.2: + resolution: {integrity: sha512-gCq3NDI2P35B2n6t76YJuOp7d6cN/C7Rt0577l91wllh0sY9ZBuw9KaSGqH/b0hzn3CWWJbpbW0W0WvQ1H/Q7g==} + dependencies: + emitter-component: 1.1.1 + dev: false + + /typescript/4.7.4: + resolution: {integrity: sha512-C0WQT0gezHuw6AdY1M2jxUO83Rjf0HP7Sk1DtXj6j1EwkQNZrHAg2XPWlq62oqEhYvONq5pkC2Y9oPljWToLmQ==} + engines: {node: '>=4.2.0'} + hasBin: true + + /pinia/2.0.28_e7lp6ggkpgyi5vqd44m2kxvk6i: + resolution: {integrity: sha512-YClq9DkqCblq9rlyUual7ezMu/iICWdBtfJrDt4oWU9Zxpijyz7xB2xTwx57DaBQ96UGvvTMORzALr+iO5PVMw==} + peerDependencies: + '@vue/composition-api': ^1.4.0 + typescript: '>=4.4.4' + vue: ^2.6.14 || ^3.2.0 + peerDependenciesMeta: + '@vue/composition-api': + optional: true + typescript: + optional: true + dependencies: + '@vue/devtools-api': 6.4.5 + typescript: 4.7.4 + vue: 3.2.45 + vue-demi: 0.13.11_vue@3.2.45 + dev: false + + /http-proxy-agent/5.0.0: + resolution: {integrity: sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==} + engines: {node: '>= 6'} + dependencies: + '@tootallnate/once': 2.0.0 + agent-base: 6.0.2 + debug: 4.3.4 + transitivePeerDependencies: + - supports-color + dev: true \ No newline at end of file diff --git a/pkg/lockfile/parse-pnpm-lock.go b/pkg/lockfile/parse-pnpm-lock.go index bb220706de..545450fa5d 100644 --- a/pkg/lockfile/parse-pnpm-lock.go +++ b/pkg/lockfile/parse-pnpm-lock.go @@ -65,12 +65,12 @@ func startsWithNumber(str string) bool { // extractPnpmPackageNameAndVersion parses a dependency path, attempting to // extract the name and version of the package it represents -func extractPnpmPackageNameAndVersion(dependencyPath string, lockfileVersion float64) (string, string) { +func extractPnpmPackageNameAndVersion(dependencyPath string, lockfileVersion float64) (string, string, error) { // file dependencies must always have a name property to be installed, // and their dependency path never has the version encoded, so we can // skip trying to extract either from their dependency path if strings.HasPrefix(dependencyPath, "file:") { - return "", "" + return "", "", nil } // v9.0 specifies the dependencies as @ rather than as a path @@ -84,10 +84,15 @@ func extractPnpmPackageNameAndVersion(dependencyPath string, lockfileVersion flo name = "@" + name } - return name, version + return name, version, nil } parts := strings.Split(dependencyPath, "/") + + if len(parts) == 1 { + return "", "", errors.New("invalid package path") + } + var name string parts = parts[1:] @@ -111,7 +116,7 @@ func extractPnpmPackageNameAndVersion(dependencyPath string, lockfileVersion flo } if version == "" || !startsWithNumber(version) { - return "", "" + return "", "", nil } underscoreIndex := strings.Index(version, "_") @@ -120,7 +125,7 @@ func extractPnpmPackageNameAndVersion(dependencyPath string, lockfileVersion flo version = strings.Split(version, "_")[0] } - return name, version + return name, version, nil } func parseNameAtVersion(value string) (name string, version string) { @@ -134,11 +139,15 @@ func parseNameAtVersion(value string) (name string, version string) { return matches[1], matches[2] } -func parsePnpmLock(lockfile PnpmLockfile) []PackageDetails { +func parsePnpmLock(lockfile PnpmLockfile) ([]PackageDetails, error) { packages := make([]PackageDetails, 0, len(lockfile.Packages)) for s, pkg := range lockfile.Packages { - name, version := 
extractPnpmPackageNameAndVersion(s, lockfile.Version) + name, version, err := extractPnpmPackageNameAndVersion(s, lockfile.Version) + + if err != nil { + return nil, err + } // "name" is only present if it's not in the dependency path and takes // priority over whatever name we think we've extracted (if any) @@ -182,7 +191,7 @@ func parsePnpmLock(lockfile PnpmLockfile) []PackageDetails { }) } - return packages + return packages, nil } type PnpmLockExtractor struct{} @@ -205,7 +214,12 @@ func (e PnpmLockExtractor) Extract(f DepFile) ([]PackageDetails, error) { parsedLockfile = &PnpmLockfile{} } - return parsePnpmLock(*parsedLockfile), nil + packageDetails, err := parsePnpmLock(*parsedLockfile) + if err != nil { + return []PackageDetails{}, fmt.Errorf("could not extract from %s: %w", f.Path(), err) + } + + return packageDetails, nil } var _ Extractor = PnpmLockExtractor{} diff --git a/pkg/lockfile/parse-pnpm-lock_test.go b/pkg/lockfile/parse-pnpm-lock_test.go index a08c11596f..bee7c6b4aa 100644 --- a/pkg/lockfile/parse-pnpm-lock_test.go +++ b/pkg/lockfile/parse-pnpm-lock_test.go @@ -586,3 +586,12 @@ func TestParsePnpmLock_Files(t *testing.T) { }, }) } + +func TestParsePnpmLock_InvalidPackagePath(t *testing.T) { + t.Parallel() + + packages, err := lockfile.ParsePnpmLock("fixtures/pnpm/invalid-package-path.yaml") + + expectErrContaining(t, err, "invalid package path") + expectPackages(t, packages, []lockfile.PackageDetails{}) +} diff --git a/pkg/osvscanner/osvscanner.go b/pkg/osvscanner/osvscanner.go index c4c9c929cf..074f00613e 100644 --- a/pkg/osvscanner/osvscanner.go +++ b/pkg/osvscanner/osvscanner.go @@ -2,6 +2,8 @@ package osvscanner import ( "bufio" + "cmp" + "context" "crypto/md5" //nolint:gosec "errors" "fmt" @@ -11,15 +13,21 @@ import ( "path" "path/filepath" "slices" - "sort" "strings" + "github.com/google/osv-scalibr/extractor" + "github.com/google/osv-scalibr/extractor/filesystem/os/apk" + "github.com/google/osv-scalibr/extractor/filesystem/os/dpkg" + scalibrosv "github.com/google/osv-scalibr/extractor/filesystem/osv" + "github.com/google/osv-scanner/internal/config" "github.com/google/osv-scanner/internal/customgitignore" "github.com/google/osv-scanner/internal/depsdev" "github.com/google/osv-scanner/internal/image" "github.com/google/osv-scanner/internal/local" - "github.com/google/osv-scanner/internal/manifest" + "github.com/google/osv-scanner/internal/lockfilescalibr" + "github.com/google/osv-scanner/internal/lockfilescalibr/language/java/pomxmlnet" + "github.com/google/osv-scanner/internal/lockfilescalibr/language/osv/osvscannerjson" "github.com/google/osv-scanner/internal/output" "github.com/google/osv-scanner/internal/resolution/client" "github.com/google/osv-scanner/internal/resolution/datasource" @@ -37,16 +45,16 @@ import ( ) type ScannerActions struct { - LockfilePaths []string - SBOMPaths []string - DirectoryPaths []string - GitCommits []string - Recursive bool - SkipGit bool - NoIgnore bool - DockerContainerNames []string - ConfigOverridePath string - CallAnalysisStates map[string]bool + LockfilePaths []string + SBOMPaths []string + DirectoryPaths []string + GitCommits []string + Recursive bool + SkipGit bool + NoIgnore bool + DockerImageName string + ConfigOverridePath string + CallAnalysisStates map[string]bool ExperimentalScannerActions } @@ -64,6 +72,7 @@ type ExperimentalScannerActions struct { } type TransitiveScanningActions struct { + Disabled bool NativeDataSource bool MavenRegistry string } @@ -170,17 +179,19 @@ func scanDir(r reporter.Reporter, dir string, 
skipGit bool, recursive bool, useG } if !info.IsDir() { - if extractor, _ := lockfile.FindExtractor(path, ""); extractor != nil { - pkgs, err := scanLockfile(r, path, "", compareOffline, transitiveAct) - if err != nil { + pkgs, err := scanLockfile(r, path, "", transitiveAct) + if err != nil { + // If no extractors found then just continue + if !errors.Is(err, lockfilescalibr.ErrNoExtractorsFound) { r.Errorf("Attempted to scan lockfile but failed: %s\n", path) } - scannedPackages = append(scannedPackages, pkgs...) } + scannedPackages = append(scannedPackages, pkgs...) + // No need to check for error // If scan fails, it means it isn't a valid SBOM file, // so just move onto the next file - pkgs, _ := scanSBOMFile(r, path, true) + pkgs, _ = scanSBOMFile(r, path, true) scannedPackages = append(scannedPackages, pkgs...) } @@ -353,29 +364,31 @@ func scanImage(r reporter.Reporter, path string) ([]scannedPackage, error) { // scanLockfile will load, identify, and parse the lockfile path passed in, and add the dependencies specified // within to `query` -func scanLockfile(r reporter.Reporter, path string, parseAs string, compareOffline bool, transitiveAct TransitiveScanningActions) ([]scannedPackage, error) { +func scanLockfile(r reporter.Reporter, path string, parseAs string, transitiveAct TransitiveScanningActions) ([]scannedPackage, error) { var err error - var parsedLockfile lockfile.Lockfile - - f, err := lockfile.OpenLocalDepFile(path) - - if err == nil { - // special case for the APK and DPKG parsers because they have a very generic name while - // living at a specific location, so they are not included in the map of parsers - // used by lockfile.Parse to avoid false-positives when scanning projects - switch parseAs { - case "apk-installed": - parsedLockfile, err = lockfile.FromApkInstalled(path) - case "dpkg-status": - parsedLockfile, err = lockfile.FromDpkgStatus(path) - case "osv-scanner": - parsedLockfile, err = lockfile.FromOSVScannerResults(path) - default: - if !compareOffline && (parseAs == "pom.xml" || filepath.Base(path) == "pom.xml") { - parsedLockfile, err = extractMavenDeps(f, transitiveAct) - } else { - parsedLockfile, err = lockfile.ExtractDeps(f, parseAs) + + var inventories []*extractor.Inventory + + // special case for the APK and DPKG parsers because they have a very generic name while + // living at a specific location, so they are not included in the map of parsers + // used by lockfile.Parse to avoid false-positives when scanning projects + switch parseAs { + case "apk-installed": + inventories, err = lockfilescalibr.ExtractWithExtractor(context.Background(), path, apk.New(apk.DefaultConfig())) + case "dpkg-status": + inventories, err = lockfilescalibr.ExtractWithExtractor(context.Background(), path, dpkg.New(dpkg.DefaultConfig())) + case "osv-scanner": + inventories, err = lockfilescalibr.ExtractWithExtractor(context.Background(), path, osvscannerjson.Extractor{}) + default: + if !transitiveAct.Disabled && (parseAs == "pom.xml" || filepath.Base(path) == "pom.xml") { + ext, extErr := createMavenExtractor(transitiveAct) + if extErr != nil { + return nil, extErr } + + inventories, err = lockfilescalibr.ExtractWithExtractor(context.Background(), path, ext) + } else { + inventories, err = lockfilescalibr.Extract(context.Background(), path, parseAs) } } @@ -389,33 +402,57 @@ func scanLockfile(r reporter.Reporter, path string, parseAs string, compareOffli parsedAsComment = fmt.Sprintf("as a %s ", parseAs) } + slices.SortFunc(inventories, func(i, j *extractor.Inventory) int { 
+ return cmp.Or( + strings.Compare(i.Name, j.Name), + strings.Compare(i.Version, j.Version), + ) + }) + + pkgCount := len(inventories) + r.Infof( "Scanned %s file %sand found %d %s\n", path, parsedAsComment, - len(parsedLockfile.Packages), - output.Form(len(parsedLockfile.Packages), "package", "packages"), + pkgCount, + output.Form(pkgCount, "package", "packages"), ) - packages := make([]scannedPackage, len(parsedLockfile.Packages)) - for i, pkgDetail := range parsedLockfile.Packages { - packages[i] = scannedPackage{ - Name: pkgDetail.Name, - Version: pkgDetail.Version, - Commit: pkgDetail.Commit, - Ecosystem: pkgDetail.Ecosystem, - DepGroups: pkgDetail.DepGroups, + packages := make([]scannedPackage, 0, pkgCount) + + for _, inv := range inventories { + scannedPackage := scannedPackage{ + Name: inv.Name, + Version: inv.Version, Source: models.SourceInfo{ Path: path, Type: "lockfile", }, } + if inv.SourceCode != nil { + scannedPackage.Commit = inv.SourceCode.Commit + } + eco := inv.Ecosystem() + // TODO(rexpan): Refactor these minor patches to individual items + // TODO: Ecosystem should be pared with Enum : Suffix + if eco == "Alpine" { + eco = "Alpine:v3.20" + } + + scannedPackage.Ecosystem = lockfile.Ecosystem(eco) + + if dg, ok := inv.Metadata.(scalibrosv.DepGroups); ok { + scannedPackage.DepGroups = dg.DepGroups() + } + + packages = append(packages, scannedPackage) } return packages, nil } -func extractMavenDeps(f lockfile.DepFile, actions TransitiveScanningActions) (lockfile.Lockfile, error) { +func createMavenExtractor(actions TransitiveScanningActions) (*pomxmlnet.Extractor, error) { var depClient client.DependencyClient var err error if actions.NativeDataSource { @@ -424,37 +461,20 @@ func extractMavenDeps(f lockfile.DepFile, actions TransitiveScanningActions) (lo depClient, err = client.NewDepsDevClient(depsdev.DepsdevAPI) } if err != nil { - return lockfile.Lockfile{}, err + return nil, err } mavenClient, err := datasource.NewMavenRegistryAPIClient(actions.MavenRegistry) if err != nil { - return lockfile.Lockfile{}, err + return nil, err } - extractor := manifest.MavenResolverExtractor{ + extractor := pomxmlnet.Extractor{ DependencyClient: depClient, MavenRegistryAPIClient: mavenClient, } - packages, err := extractor.Extract(f) - if err != nil { - err = fmt.Errorf("failed extracting %s: %w", f.Path(), err) - } - // Sort packages for testing convenience. - sort.Slice(packages, func(i, j int) bool { - if packages[i].Name == packages[j].Name { - return packages[i].Version < packages[j].Version - } - - return packages[i].Name < packages[j].Name - }) - - return lockfile.Lockfile{ - FilePath: f.Path(), - ParsedAs: "pom.xml", - Packages: packages, - }, err + return &extractor, nil } // scanSBOMFile will load, identify, and parse the SBOM path passed in, and add the dependencies specified @@ -643,78 +663,83 @@ func createCommitQueryPackage(commit string, source string) scannedPackage { } } -func scanDebianDocker(r reporter.Reporter, dockerImageName string) ([]scannedPackage, error) { - cmd := exec.Command("docker", "run", "--rm", "--entrypoint", "/usr/bin/dpkg-query", dockerImageName, "-f", "${Package}###${Version}\\n", "-W") - stdout, err := cmd.StdoutPipe() +func runCommandLogError(r reporter.Reporter, name string, args ...string) error { + cmd := exec.Command(name, args...) 
+ // Get stderr for debugging when docker fails + stderr, err := cmd.StderrPipe() if err != nil { - r.Errorf("Failed to get stdout: %s\n", err) + r.Errorf("Failed to get stderr: %s\n", err) + return err + } + + err = cmd.Start() + if err != nil { + r.Errorf("Failed to run docker command (%q): %s\n", cmd.String(), err) + return err + } + // This has to be captured before cmd.Wait() is called, as cmd.Wait() closes the stderr pipe. + var stderrLines []string + scanner := bufio.NewScanner(stderr) + for scanner.Scan() { + stderrLines = append(stderrLines, scanner.Text()) + } + + err = cmd.Wait() + if err != nil { + r.Errorf("Docker command exited with code (%q): %d\nSTDERR:\n", cmd.String(), cmd.ProcessState.ExitCode()) + for _, line := range stderrLines { + r.Errorf("> %s\n", line) + } + + return errors.New("failed to run docker command") + } + + return nil +} + +func scanDockerImage(r reporter.Reporter, dockerImageName string) ([]scannedPackage, error) { + tempImageFile, err := os.CreateTemp("", "docker-image-*.tar") + if err != nil { + r.Errorf("Failed to create temporary file: %s\n", err) return nil, err } - stderr, err := cmd.StderrPipe() + err = tempImageFile.Close() if err != nil { - r.Errorf("Failed to get stderr: %s\n", err) return nil, err } + defer os.Remove(tempImageFile.Name()) - err = cmd.Start() + r.Infof("Pulling docker image (%q)...\n", dockerImageName) + err = runCommandLogError(r, "docker", "pull", "-q", dockerImageName) if err != nil { - r.Errorf("Failed to start docker image: %s\n", err) return nil, err } - defer func() { - var stderrlines []string - scanner := bufio.NewScanner(stderr) - for scanner.Scan() { - stderrlines = append(stderrlines, scanner.Text()) - } + r.Infof("Saving docker image (%q) to temporary file...\n", dockerImageName) + err = runCommandLogError(r, "docker", "save", "-o", tempImageFile.Name(), dockerImageName) + if err != nil { + return nil, err + } - err := cmd.Wait() - if err != nil { - r.Errorf("Docker command exited with code %d\n", cmd.ProcessState.ExitCode()) - for _, line := range stderrlines { - r.Errorf("> %s\n", line) - } - } - }() + r.Infof("Scanning image...\n") + packages, err := scanImage(r, tempImageFile.Name()) + if err != nil { + return nil, err + } - scanner := bufio.NewScanner(stdout) - var packages []scannedPackage - for scanner.Scan() { - text := scanner.Text() - text = strings.TrimSpace(text) - if len(text) == 0 { - continue - } - splitText := strings.Split(text, "###") - if len(splitText) != 2 { - r.Errorf("Unexpected output from Debian container: \n\n%s\n", text) - return nil, fmt.Errorf("unexpected output from Debian container: \n\n%s", text) - } - // TODO(rexpan): Get and specify exact debian release version - packages = append(packages, scannedPackage{ - Name: splitText[0], - Version: splitText[1], - Ecosystem: "Debian", - Source: models.SourceInfo{ - Path: dockerImageName, - Type: "docker", - }, - }) + // Modify the image path to be the image name, rather than the temporary file name + for i := range packages { + _, internalPath, _ := strings.Cut(packages[i].Source.Path, ":") + packages[i].Source.Path = dockerImageName + ":" + internalPath } - r.Infof( - "Scanned docker image with %d %s\n", - len(packages), - output.Form(len(packages), "package", "packages"), - ) return packages, nil } // Filters results according to config, preserving order. Returns total number of vulnerabilities removed. 
-func filterResults(r reporter.Reporter, results *models.VulnerabilityResults, configManager *config.ConfigManager, allPackages bool) int { +func filterResults(r reporter.Reporter, results *models.VulnerabilityResults, configManager *config.Manager, allPackages bool) int { removedCount := 0 unimportantCount := 0 newResults := []models.PackageSource{} // Want 0 vulnerabilities to show in JSON as an empty list, not null. @@ -868,7 +893,7 @@ func DoScan(actions ScannerActions, r reporter.Reporter) (models.VulnerabilityRe return models.VulnerabilityResults{}, errors.New("databases can only be downloaded when running in offline mode") } - configManager := config.ConfigManager{ + configManager := config.Manager{ DefaultConfig: config.Config{}, ConfigMap: make(map[string]config.Config), } @@ -894,9 +919,11 @@ func DoScan(actions ScannerActions, r reporter.Reporter) (models.VulnerabilityRe scannedPackages = append(scannedPackages, pkgs...) } - // TODO: Deprecated - for _, container := range actions.DockerContainerNames { - pkgs, _ := scanDebianDocker(r, container) + if actions.DockerImageName != "" { + pkgs, err := scanDockerImage(r, actions.DockerImageName) + if err != nil { + return models.VulnerabilityResults{}, err + } scannedPackages = append(scannedPackages, pkgs...) } @@ -907,7 +934,7 @@ func DoScan(actions ScannerActions, r reporter.Reporter) (models.VulnerabilityRe r.Errorf("Failed to resolved path with error %s\n", err) return models.VulnerabilityResults{}, err } - pkgs, err := scanLockfile(r, lockfilePath, parseAs, actions.CompareOffline, actions.TransitiveScanningActions) + pkgs, err := scanLockfile(r, lockfilePath, parseAs, actions.TransitiveScanningActions) if err != nil { return models.VulnerabilityResults{}, err } @@ -1032,7 +1059,7 @@ func filterUnscannablePackages(packages []scannedPackage) []scannedPackage { } // filterIgnoredPackages removes ignore scanned packages according to config. Returns filtered scanned packages. -func filterIgnoredPackages(r reporter.Reporter, packages []scannedPackage, configManager *config.ConfigManager) []scannedPackage { +func filterIgnoredPackages(r reporter.Reporter, packages []scannedPackage, configManager *config.Manager) []scannedPackage { out := make([]scannedPackage, 0, len(packages)) for _, p := range packages { configToUse := configManager.Get(r, p.Source.Path) @@ -1047,7 +1074,12 @@ func filterIgnoredPackages(r reporter.Reporter, packages []scannedPackage, confi } if ignore, ignoreLine := configToUse.ShouldIgnorePackage(pkg); ignore { - pkgString := fmt.Sprintf("%s/%s/%s", p.Ecosystem, p.Name, p.Version) + var pkgString string + if p.PURL != "" { + pkgString = p.PURL + } else { + pkgString = fmt.Sprintf("%s/%s/%s", p.Ecosystem, p.Name, p.Version) + } reason := ignoreLine.Reason if reason == "" { @@ -1086,6 +1118,13 @@ func patchPackageForRequest(pkg scannedPackage) scannedPackage { } } + // TODO: This should be done on the osv.dev side + // This is needed because Ubuntu ecosystem appends LTS + // but the scanner does not have this information. 
+ if pkg.Ecosystem == "Ubuntu:20.04" { + pkg.Ecosystem = "Ubuntu:20.04:LTS" + } + return pkg } @@ -1161,7 +1200,7 @@ func makeLicensesRequests(packages []scannedPackage) ([][]models.License, error) } // Overrides Go version using osv-scanner.toml -func overrideGoVersion(r reporter.Reporter, packages []scannedPackage, configManager *config.ConfigManager) { +func overrideGoVersion(r reporter.Reporter, packages []scannedPackage, configManager *config.Manager) { for i, pkg := range packages { if pkg.Name == "stdlib" && pkg.Ecosystem == "Go" { configToUse := configManager.Get(r, pkg.Source.Path) diff --git a/pkg/osvscanner/osvscanner_internal_test.go b/pkg/osvscanner/osvscanner_internal_test.go index 8b53753e2d..ec2e5b071c 100644 --- a/pkg/osvscanner/osvscanner_internal_test.go +++ b/pkg/osvscanner/osvscanner_internal_test.go @@ -40,9 +40,9 @@ func Test_filterResults(t *testing.T) { t.Run(tt.name, func(t *testing.T) { t.Parallel() r := &reporter.VoidReporter{} - // ConfigManager looks for osv-scanner.toml in the source path. + // configManager looks for osv-scanner.toml in the source path. // Sources in the test input should point to files/folders in the text fixture folder for this to work correctly. - configManager := config.ConfigManager{ + configManager := config.Manager{ DefaultConfig: config.Config{}, ConfigMap: make(map[string]config.Config), } diff --git a/pkg/osvscanner/vulnerability_result.go b/pkg/osvscanner/vulnerability_result.go index aa837ebb66..298488763e 100644 --- a/pkg/osvscanner/vulnerability_result.go +++ b/pkg/osvscanner/vulnerability_result.go @@ -24,7 +24,7 @@ func buildVulnerabilityResults( vulnsResp *osv.HydratedBatchedResponse, licensesResp [][]models.License, actions ScannerActions, - configManager *config.ConfigManager, + configManager *config.Manager, ) models.VulnerabilityResults { results := models.VulnerabilityResults{ Results: []models.PackageSource{}, diff --git a/pkg/osvscanner/vulnerability_result_internal_test.go b/pkg/osvscanner/vulnerability_result_internal_test.go index 7b1bcfa65f..c60d8503a6 100644 --- a/pkg/osvscanner/vulnerability_result_internal_test.go +++ b/pkg/osvscanner/vulnerability_result_internal_test.go @@ -19,7 +19,7 @@ func Test_assembleResult(t *testing.T) { vulnsResp *osv.HydratedBatchedResponse licensesResp [][]models.License actions ScannerActions - config *config.ConfigManager + config *config.Manager } packages := []scannedPackage{ { @@ -99,7 +99,7 @@ func Test_assembleResult(t *testing.T) { }, CallAnalysisStates: callAnalysisStates, }, - config: &config.ConfigManager{}, + config: &config.Manager{}, }, }, { name: "group_vulnerabilities_with_all_packages_included", @@ -115,7 +115,7 @@ func Test_assembleResult(t *testing.T) { }, CallAnalysisStates: callAnalysisStates, }, - config: &config.ConfigManager{}, + config: &config.Manager{}, }, }, { name: "group_vulnerabilities_with_licenses", @@ -132,7 +132,7 @@ func Test_assembleResult(t *testing.T) { }, CallAnalysisStates: callAnalysisStates, }, - config: &config.ConfigManager{}, + config: &config.Manager{}, }, }, { name: "group_vulnerabilities_with_license_allowlist", @@ -149,7 +149,7 @@ func Test_assembleResult(t *testing.T) { CallAnalysisStates: callAnalysisStates, }, - config: &config.ConfigManager{}, + config: &config.Manager{}, }, }, { name: "group_vulnerabilities_with_license_allowlist_and_license_override", @@ -165,7 +165,7 @@ func Test_assembleResult(t *testing.T) { }, CallAnalysisStates: callAnalysisStates, }, - config: &config.ConfigManager{ + config: &config.Manager{ 
OverrideConfig: &config.Config{ PackageOverrides: []config.PackageOverrideEntry{ { @@ -193,7 +193,7 @@ func Test_assembleResult(t *testing.T) { }, CallAnalysisStates: callAnalysisStates, }, - config: &config.ConfigManager{}, + config: &config.Manager{}, }, }} for _, tt := range tests { diff --git a/pkg/spdx/gen.go b/pkg/spdx/gen.go deleted file mode 100644 index 8c7daefd31..0000000000 --- a/pkg/spdx/gen.go +++ /dev/null @@ -1,62 +0,0 @@ -//go:build generate -// +build generate - -//go:generate go run gen.go - -package main - -import ( - "encoding/json" - "fmt" - "go/format" - "io/ioutil" - "net/http" - "strings" -) - -type License struct { - SPDXID string `json:"licenseId"` -} - -func main() { - resp, err := http.Get("https://raw.githubusercontent.com/spdx/license-list-data/main/json/licenses.json") - if err != nil { - panic(err) - } - defer resp.Body.Close() - - body, err := ioutil.ReadAll(resp.Body) - if err != nil { - panic(err) - } - - var licenseList struct { - Licenses []License `json:"licenses"` - } - err = json.Unmarshal(body, &licenseList) - if err != nil { - panic(err) - } - - output := strings.TrimLeft(` -// Code generated by gen.go. DO NOT EDIT. -// -// Deprecated: this is now private and should not be used outside the scanner -package spdx - -// Deprecated: this is now private and should not be used outside the scanner -var IDs = map[string]bool{ -`, "\n") - for _, license := range licenseList.Licenses { - output += fmt.Sprintf("%q: true,\n", strings.ToLower(license.SPDXID)) - } - output += "}" - formatted, err := format.Source([]byte(output)) - if err != nil { - panic(err) - } - err = ioutil.WriteFile("licenses.go", formatted, 0644) - if err != nil { - panic(err) - } -} diff --git a/pkg/spdx/licenses.go b/pkg/spdx/licenses.go deleted file mode 100644 index c389b69086..0000000000 --- a/pkg/spdx/licenses.go +++ /dev/null @@ -1,679 +0,0 @@ -// Code generated by gen.go. DO NOT EDIT. 
-// -// Deprecated: this is now private and should not be used outside the scanner -package spdx - -// Deprecated: this is now private and should not be used outside the scanner -var IDs = map[string]bool{ - "0bsd": true, - "3d-slicer-1.0": true, - "aal": true, - "abstyles": true, - "adacore-doc": true, - "adobe-2006": true, - "adobe-display-postscript": true, - "adobe-glyph": true, - "adobe-utopia": true, - "adsl": true, - "afl-1.1": true, - "afl-1.2": true, - "afl-2.0": true, - "afl-2.1": true, - "afl-3.0": true, - "afmparse": true, - "agpl-1.0": true, - "agpl-1.0-only": true, - "agpl-1.0-or-later": true, - "agpl-3.0": true, - "agpl-3.0-only": true, - "agpl-3.0-or-later": true, - "aladdin": true, - "amd-newlib": true, - "amdplpa": true, - "aml": true, - "aml-glslang": true, - "ampas": true, - "antlr-pd": true, - "antlr-pd-fallback": true, - "any-osi": true, - "apache-1.0": true, - "apache-1.1": true, - "apache-2.0": true, - "apafml": true, - "apl-1.0": true, - "app-s2p": true, - "apsl-1.0": true, - "apsl-1.1": true, - "apsl-1.2": true, - "apsl-2.0": true, - "arphic-1999": true, - "artistic-1.0": true, - "artistic-1.0-cl8": true, - "artistic-1.0-perl": true, - "artistic-2.0": true, - "aswf-digital-assets-1.0": true, - "aswf-digital-assets-1.1": true, - "baekmuk": true, - "bahyph": true, - "barr": true, - "bcrypt-solar-designer": true, - "beerware": true, - "bitstream-charter": true, - "bitstream-vera": true, - "bittorrent-1.0": true, - "bittorrent-1.1": true, - "blessing": true, - "blueoak-1.0.0": true, - "boehm-gc": true, - "boehm-gc-without-fee": true, - "borceux": true, - "brian-gladman-2-clause": true, - "brian-gladman-3-clause": true, - "bsd-1-clause": true, - "bsd-2-clause": true, - "bsd-2-clause-darwin": true, - "bsd-2-clause-first-lines": true, - "bsd-2-clause-freebsd": true, - "bsd-2-clause-netbsd": true, - "bsd-2-clause-patent": true, - "bsd-2-clause-views": true, - "bsd-3-clause": true, - "bsd-3-clause-acpica": true, - "bsd-3-clause-attribution": true, - "bsd-3-clause-clear": true, - "bsd-3-clause-flex": true, - "bsd-3-clause-hp": true, - "bsd-3-clause-lbnl": true, - "bsd-3-clause-modification": true, - "bsd-3-clause-no-military-license": true, - "bsd-3-clause-no-nuclear-license": true, - "bsd-3-clause-no-nuclear-license-2014": true, - "bsd-3-clause-no-nuclear-warranty": true, - "bsd-3-clause-open-mpi": true, - "bsd-3-clause-sun": true, - "bsd-4-clause": true, - "bsd-4-clause-shortened": true, - "bsd-4-clause-uc": true, - "bsd-4.3reno": true, - "bsd-4.3tahoe": true, - "bsd-advertising-acknowledgement": true, - "bsd-attribution-hpnd-disclaimer": true, - "bsd-inferno-nettverk": true, - "bsd-protection": true, - "bsd-source-beginning-file": true, - "bsd-source-code": true, - "bsd-systemics": true, - "bsd-systemics-w3works": true, - "bsl-1.0": true, - "busl-1.1": true, - "bzip2-1.0.5": true, - "bzip2-1.0.6": true, - "c-uda-1.0": true, - "cal-1.0": true, - "cal-1.0-combined-work-exception": true, - "caldera": true, - "caldera-no-preamble": true, - "catharon": true, - "catosl-1.1": true, - "cc-by-1.0": true, - "cc-by-2.0": true, - "cc-by-2.5": true, - "cc-by-2.5-au": true, - "cc-by-3.0": true, - "cc-by-3.0-at": true, - "cc-by-3.0-au": true, - "cc-by-3.0-de": true, - "cc-by-3.0-igo": true, - "cc-by-3.0-nl": true, - "cc-by-3.0-us": true, - "cc-by-4.0": true, - "cc-by-nc-1.0": true, - "cc-by-nc-2.0": true, - "cc-by-nc-2.5": true, - "cc-by-nc-3.0": true, - "cc-by-nc-3.0-de": true, - "cc-by-nc-4.0": true, - "cc-by-nc-nd-1.0": true, - "cc-by-nc-nd-2.0": true, - "cc-by-nc-nd-2.5": true, - 
"cc-by-nc-nd-3.0": true, - "cc-by-nc-nd-3.0-de": true, - "cc-by-nc-nd-3.0-igo": true, - "cc-by-nc-nd-4.0": true, - "cc-by-nc-sa-1.0": true, - "cc-by-nc-sa-2.0": true, - "cc-by-nc-sa-2.0-de": true, - "cc-by-nc-sa-2.0-fr": true, - "cc-by-nc-sa-2.0-uk": true, - "cc-by-nc-sa-2.5": true, - "cc-by-nc-sa-3.0": true, - "cc-by-nc-sa-3.0-de": true, - "cc-by-nc-sa-3.0-igo": true, - "cc-by-nc-sa-4.0": true, - "cc-by-nd-1.0": true, - "cc-by-nd-2.0": true, - "cc-by-nd-2.5": true, - "cc-by-nd-3.0": true, - "cc-by-nd-3.0-de": true, - "cc-by-nd-4.0": true, - "cc-by-sa-1.0": true, - "cc-by-sa-2.0": true, - "cc-by-sa-2.0-uk": true, - "cc-by-sa-2.1-jp": true, - "cc-by-sa-2.5": true, - "cc-by-sa-3.0": true, - "cc-by-sa-3.0-at": true, - "cc-by-sa-3.0-de": true, - "cc-by-sa-3.0-igo": true, - "cc-by-sa-4.0": true, - "cc-pddc": true, - "cc0-1.0": true, - "cddl-1.0": true, - "cddl-1.1": true, - "cdl-1.0": true, - "cdla-permissive-1.0": true, - "cdla-permissive-2.0": true, - "cdla-sharing-1.0": true, - "cecill-1.0": true, - "cecill-1.1": true, - "cecill-2.0": true, - "cecill-2.1": true, - "cecill-b": true, - "cecill-c": true, - "cern-ohl-1.1": true, - "cern-ohl-1.2": true, - "cern-ohl-p-2.0": true, - "cern-ohl-s-2.0": true, - "cern-ohl-w-2.0": true, - "cfitsio": true, - "check-cvs": true, - "checkmk": true, - "clartistic": true, - "clips": true, - "cmu-mach": true, - "cmu-mach-nodoc": true, - "cnri-jython": true, - "cnri-python": true, - "cnri-python-gpl-compatible": true, - "coil-1.0": true, - "community-spec-1.0": true, - "condor-1.1": true, - "copyleft-next-0.3.0": true, - "copyleft-next-0.3.1": true, - "cornell-lossless-jpeg": true, - "cpal-1.0": true, - "cpl-1.0": true, - "cpol-1.02": true, - "cronyx": true, - "crossword": true, - "crystalstacker": true, - "cua-opl-1.0": true, - "cube": true, - "curl": true, - "cve-tou": true, - "d-fsl-1.0": true, - "dec-3-clause": true, - "diffmark": true, - "dl-de-by-2.0": true, - "dl-de-zero-2.0": true, - "doc": true, - "docbook-schema": true, - "docbook-stylesheet": true, - "docbook-xml": true, - "dotseqn": true, - "drl-1.0": true, - "drl-1.1": true, - "dsdp": true, - "dtoa": true, - "dvipdfm": true, - "ecl-1.0": true, - "ecl-2.0": true, - "ecos-2.0": true, - "efl-1.0": true, - "efl-2.0": true, - "egenix": true, - "elastic-2.0": true, - "entessa": true, - "epics": true, - "epl-1.0": true, - "epl-2.0": true, - "erlpl-1.1": true, - "etalab-2.0": true, - "eudatagrid": true, - "eupl-1.0": true, - "eupl-1.1": true, - "eupl-1.2": true, - "eurosym": true, - "fair": true, - "fbm": true, - "fdk-aac": true, - "ferguson-twofish": true, - "frameworx-1.0": true, - "freebsd-doc": true, - "freeimage": true, - "fsfap": true, - "fsfap-no-warranty-disclaimer": true, - "fsful": true, - "fsfullr": true, - "fsfullrwd": true, - "ftl": true, - "furuseth": true, - "fwlw": true, - "gcr-docs": true, - "gd": true, - "gfdl-1.1": true, - "gfdl-1.1-invariants-only": true, - "gfdl-1.1-invariants-or-later": true, - "gfdl-1.1-no-invariants-only": true, - "gfdl-1.1-no-invariants-or-later": true, - "gfdl-1.1-only": true, - "gfdl-1.1-or-later": true, - "gfdl-1.2": true, - "gfdl-1.2-invariants-only": true, - "gfdl-1.2-invariants-or-later": true, - "gfdl-1.2-no-invariants-only": true, - "gfdl-1.2-no-invariants-or-later": true, - "gfdl-1.2-only": true, - "gfdl-1.2-or-later": true, - "gfdl-1.3": true, - "gfdl-1.3-invariants-only": true, - "gfdl-1.3-invariants-or-later": true, - "gfdl-1.3-no-invariants-only": true, - "gfdl-1.3-no-invariants-or-later": true, - "gfdl-1.3-only": true, - "gfdl-1.3-or-later": true, - 
"giftware": true, - "gl2ps": true, - "glide": true, - "glulxe": true, - "glwtpl": true, - "gnuplot": true, - "gpl-1.0": true, - "gpl-1.0+": true, - "gpl-1.0-only": true, - "gpl-1.0-or-later": true, - "gpl-2.0": true, - "gpl-2.0+": true, - "gpl-2.0-only": true, - "gpl-2.0-or-later": true, - "gpl-2.0-with-autoconf-exception": true, - "gpl-2.0-with-bison-exception": true, - "gpl-2.0-with-classpath-exception": true, - "gpl-2.0-with-font-exception": true, - "gpl-2.0-with-gcc-exception": true, - "gpl-3.0": true, - "gpl-3.0+": true, - "gpl-3.0-only": true, - "gpl-3.0-or-later": true, - "gpl-3.0-with-autoconf-exception": true, - "gpl-3.0-with-gcc-exception": true, - "graphics-gems": true, - "gsoap-1.3b": true, - "gtkbook": true, - "gutmann": true, - "haskellreport": true, - "hdparm": true, - "hidapi": true, - "hippocratic-2.1": true, - "hp-1986": true, - "hp-1989": true, - "hpnd": true, - "hpnd-dec": true, - "hpnd-doc": true, - "hpnd-doc-sell": true, - "hpnd-export-us": true, - "hpnd-export-us-acknowledgement": true, - "hpnd-export-us-modify": true, - "hpnd-export2-us": true, - "hpnd-fenneberg-livingston": true, - "hpnd-inria-imag": true, - "hpnd-intel": true, - "hpnd-kevlin-henney": true, - "hpnd-markus-kuhn": true, - "hpnd-merchantability-variant": true, - "hpnd-mit-disclaimer": true, - "hpnd-netrek": true, - "hpnd-pbmplus": true, - "hpnd-sell-mit-disclaimer-xserver": true, - "hpnd-sell-regexpr": true, - "hpnd-sell-variant": true, - "hpnd-sell-variant-mit-disclaimer": true, - "hpnd-sell-variant-mit-disclaimer-rev": true, - "hpnd-uc": true, - "hpnd-uc-export-us": true, - "htmltidy": true, - "ibm-pibs": true, - "icu": true, - "iec-code-components-eula": true, - "ijg": true, - "ijg-short": true, - "imagemagick": true, - "imatix": true, - "imlib2": true, - "info-zip": true, - "inner-net-2.0": true, - "intel": true, - "intel-acpi": true, - "interbase-1.0": true, - "ipa": true, - "ipl-1.0": true, - "isc": true, - "isc-veillard": true, - "jam": true, - "jasper-2.0": true, - "jpl-image": true, - "jpnic": true, - "json": true, - "kastrup": true, - "kazlib": true, - "knuth-ctan": true, - "lal-1.2": true, - "lal-1.3": true, - "latex2e": true, - "latex2e-translated-notice": true, - "leptonica": true, - "lgpl-2.0": true, - "lgpl-2.0+": true, - "lgpl-2.0-only": true, - "lgpl-2.0-or-later": true, - "lgpl-2.1": true, - "lgpl-2.1+": true, - "lgpl-2.1-only": true, - "lgpl-2.1-or-later": true, - "lgpl-3.0": true, - "lgpl-3.0+": true, - "lgpl-3.0-only": true, - "lgpl-3.0-or-later": true, - "lgpllr": true, - "libpng": true, - "libpng-2.0": true, - "libselinux-1.0": true, - "libtiff": true, - "libutil-david-nugent": true, - "liliq-p-1.1": true, - "liliq-r-1.1": true, - "liliq-rplus-1.1": true, - "linux-man-pages-1-para": true, - "linux-man-pages-copyleft": true, - "linux-man-pages-copyleft-2-para": true, - "linux-man-pages-copyleft-var": true, - "linux-openib": true, - "loop": true, - "lpd-document": true, - "lpl-1.0": true, - "lpl-1.02": true, - "lppl-1.0": true, - "lppl-1.1": true, - "lppl-1.2": true, - "lppl-1.3a": true, - "lppl-1.3c": true, - "lsof": true, - "lucida-bitmap-fonts": true, - "lzma-sdk-9.11-to-9.20": true, - "lzma-sdk-9.22": true, - "mackerras-3-clause": true, - "mackerras-3-clause-acknowledgment": true, - "magaz": true, - "mailprio": true, - "makeindex": true, - "martin-birgmeier": true, - "mcphee-slideshow": true, - "metamail": true, - "minpack": true, - "miros": true, - "mit": true, - "mit-0": true, - "mit-advertising": true, - "mit-click": true, - "mit-cmu": true, - "mit-enna": true, - "mit-feh": 
true, - "mit-festival": true, - "mit-khronos-old": true, - "mit-modern-variant": true, - "mit-open-group": true, - "mit-testregex": true, - "mit-wu": true, - "mitnfa": true, - "mmixware": true, - "motosoto": true, - "mpeg-ssg": true, - "mpi-permissive": true, - "mpich2": true, - "mpl-1.0": true, - "mpl-1.1": true, - "mpl-2.0": true, - "mpl-2.0-no-copyleft-exception": true, - "mplus": true, - "ms-lpl": true, - "ms-pl": true, - "ms-rl": true, - "mtll": true, - "mulanpsl-1.0": true, - "mulanpsl-2.0": true, - "multics": true, - "mup": true, - "naist-2003": true, - "nasa-1.3": true, - "naumen": true, - "nbpl-1.0": true, - "ncbi-pd": true, - "ncgl-uk-2.0": true, - "ncl": true, - "ncsa": true, - "net-snmp": true, - "netcdf": true, - "newsletr": true, - "ngpl": true, - "nicta-1.0": true, - "nist-pd": true, - "nist-pd-fallback": true, - "nist-software": true, - "nlod-1.0": true, - "nlod-2.0": true, - "nlpl": true, - "nokia": true, - "nosl": true, - "noweb": true, - "npl-1.0": true, - "npl-1.1": true, - "nposl-3.0": true, - "nrl": true, - "ntp": true, - "ntp-0": true, - "nunit": true, - "o-uda-1.0": true, - "oar": true, - "occt-pl": true, - "oclc-2.0": true, - "odbl-1.0": true, - "odc-by-1.0": true, - "offis": true, - "ofl-1.0": true, - "ofl-1.0-no-rfn": true, - "ofl-1.0-rfn": true, - "ofl-1.1": true, - "ofl-1.1-no-rfn": true, - "ofl-1.1-rfn": true, - "ogc-1.0": true, - "ogdl-taiwan-1.0": true, - "ogl-canada-2.0": true, - "ogl-uk-1.0": true, - "ogl-uk-2.0": true, - "ogl-uk-3.0": true, - "ogtsl": true, - "oldap-1.1": true, - "oldap-1.2": true, - "oldap-1.3": true, - "oldap-1.4": true, - "oldap-2.0": true, - "oldap-2.0.1": true, - "oldap-2.1": true, - "oldap-2.2": true, - "oldap-2.2.1": true, - "oldap-2.2.2": true, - "oldap-2.3": true, - "oldap-2.4": true, - "oldap-2.5": true, - "oldap-2.6": true, - "oldap-2.7": true, - "oldap-2.8": true, - "olfl-1.3": true, - "oml": true, - "openpbs-2.3": true, - "openssl": true, - "openssl-standalone": true, - "openvision": true, - "opl-1.0": true, - "opl-uk-3.0": true, - "opubl-1.0": true, - "oset-pl-2.1": true, - "osl-1.0": true, - "osl-1.1": true, - "osl-2.0": true, - "osl-2.1": true, - "osl-3.0": true, - "padl": true, - "parity-6.0.0": true, - "parity-7.0.0": true, - "pddl-1.0": true, - "php-3.0": true, - "php-3.01": true, - "pixar": true, - "pkgconf": true, - "plexus": true, - "pnmstitch": true, - "polyform-noncommercial-1.0.0": true, - "polyform-small-business-1.0.0": true, - "postgresql": true, - "ppl": true, - "psf-2.0": true, - "psfrag": true, - "psutils": true, - "python-2.0": true, - "python-2.0.1": true, - "python-ldap": true, - "qhull": true, - "qpl-1.0": true, - "qpl-1.0-inria-2004": true, - "radvd": true, - "rdisc": true, - "rhecos-1.1": true, - "rpl-1.1": true, - "rpl-1.5": true, - "rpsl-1.0": true, - "rsa-md": true, - "rscpl": true, - "ruby": true, - "ruby-pty": true, - "sax-pd": true, - "sax-pd-2.0": true, - "saxpath": true, - "scea": true, - "schemereport": true, - "sendmail": true, - "sendmail-8.23": true, - "sendmail-open-source-1.1": true, - "sgi-b-1.0": true, - "sgi-b-1.1": true, - "sgi-b-2.0": true, - "sgi-opengl": true, - "sgp4": true, - "shl-0.5": true, - "shl-0.51": true, - "simpl-2.0": true, - "sissl": true, - "sissl-1.2": true, - "sl": true, - "sleepycat": true, - "smlnj": true, - "smppl": true, - "snia": true, - "snprintf": true, - "softsurfer": true, - "soundex": true, - "spencer-86": true, - "spencer-94": true, - "spencer-99": true, - "spl-1.0": true, - "ssh-keyscan": true, - "ssh-openssh": true, - "ssh-short": true, - 
"ssleay-standalone": true, - "sspl-1.0": true, - "standardml-nj": true, - "sugarcrm-1.1.3": true, - "sun-ppp": true, - "sun-ppp-2000": true, - "sunpro": true, - "swl": true, - "swrule": true, - "symlinks": true, - "tapr-ohl-1.0": true, - "tcl": true, - "tcp-wrappers": true, - "termreadkey": true, - "tgppl-1.0": true, - "threeparttable": true, - "tmate": true, - "torque-1.1": true, - "tosl": true, - "tpdl": true, - "tpl-1.0": true, - "trustedqsl": true, - "ttwl": true, - "ttyp0": true, - "tu-berlin-1.0": true, - "tu-berlin-2.0": true, - "ubuntu-font-1.0": true, - "ucar": true, - "ucl-1.0": true, - "ulem": true, - "umich-merit": true, - "unicode-3.0": true, - "unicode-dfs-2015": true, - "unicode-dfs-2016": true, - "unicode-tou": true, - "unixcrypt": true, - "unlicense": true, - "upl-1.0": true, - "urt-rle": true, - "vim": true, - "vostrom": true, - "vsl-1.0": true, - "w3c": true, - "w3c-19980720": true, - "w3c-20150513": true, - "w3m": true, - "watcom-1.0": true, - "widget-workshop": true, - "wsuipa": true, - "wtfpl": true, - "wxwindows": true, - "x11": true, - "x11-distribute-modifications-variant": true, - "x11-swapped": true, - "xdebug-1.03": true, - "xerox": true, - "xfig": true, - "xfree86-1.1": true, - "xinetd": true, - "xkeyboard-config-zinoviev": true, - "xlock": true, - "xnet": true, - "xpp": true, - "xskat": true, - "xzoom": true, - "ypl-1.0": true, - "ypl-1.1": true, - "zed": true, - "zeeff": true, - "zend-2.0": true, - "zimbra-1.3": true, - "zimbra-1.4": true, - "zlib": true, - "zlib-acknowledgement": true, - "zpl-1.1": true, - "zpl-2.0": true, - "zpl-2.1": true, -} diff --git a/pkg/spdx/verify.go b/pkg/spdx/verify.go deleted file mode 100644 index df36e621fc..0000000000 --- a/pkg/spdx/verify.go +++ /dev/null @@ -1,19 +0,0 @@ -// Deprecated: this is now private and should not be used outside the scanner -package spdx - -import "strings" - -// Unrecognized filters licenses for non-spdx identifiers. The "unknown" string is -// also treated as a valid identifier. -// -// Deprecated: this is now private and should not be used outside the scanner -func Unrecognized(licenses []string) (unrecognized []string) { - for _, license := range licenses { - l := strings.ToLower(license) - if !IDs[l] && l != "unknown" { - unrecognized = append(unrecognized, license) - } - } - - return unrecognized -} diff --git a/pkg/spdx/verify_test.go b/pkg/spdx/verify_test.go deleted file mode 100644 index f0e0cecd4b..0000000000 --- a/pkg/spdx/verify_test.go +++ /dev/null @@ -1,37 +0,0 @@ -package spdx - -import ( - "reflect" - "testing" -) - -func Test_unrecognized(t *testing.T) { - t.Parallel() - tests := []struct { - name string - licenses []string - want []string - }{ - { - name: "all recognized licenses", - licenses: []string{"agpl-1.0", "MIT", "apache-1.0", "UNKNOWN"}, - want: nil, - }, { - name: "all unrecognized licenses", - licenses: []string{"agpl1.0", "unrecognized license", "apache1.0"}, - want: []string{"agpl1.0", "unrecognized license", "apache1.0"}, - }, { - name: "some recognized, some unrecognized licenses", - licenses: []string{"agpl-1.0", "unrecognized license", "apache-1.0"}, - want: []string{"unrecognized license"}, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - if got := Unrecognized(tt.licenses); !reflect.DeepEqual(got, tt.want) { - t.Errorf("Unrecognized() = %v,\nwant %v", got, tt.want) - } - }) - } -}