diff --git a/.github/workflows/auto-update-labels.yaml b/.github/workflows/auto-update-labels.yaml index 6dab8481873b..2c52775d7803 100644 --- a/.github/workflows/auto-update-labels.yaml +++ b/.github/workflows/auto-update-labels.yaml @@ -20,7 +20,7 @@ jobs: go-version-file: go.mod - name: Install aqua tools - uses: aquaproj/aqua-installer@v2.2.0 + uses: aquaproj/aqua-installer@v3.0.0 with: aqua_version: v1.25.0 diff --git a/.github/workflows/canary.yaml b/.github/workflows/canary.yaml index 516fa54fd12e..d65878b8d84b 100644 --- a/.github/workflows/canary.yaml +++ b/.github/workflows/canary.yaml @@ -25,7 +25,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Restore Trivy binaries from cache - uses: actions/cache@v4.0.0 + uses: actions/cache@v4.0.2 with: path: dist/ key: ${{ runner.os }}-bins-${{github.workflow}}-${{github.sha}} diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 40baeb83c01a..acf840769eea 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -24,7 +24,7 @@ jobs: fetch-depth: 0 - name: Restore Trivy binaries from cache - uses: actions/cache@v4.0.0 + uses: actions/cache@v4.0.2 with: path: dist/ key: ${{ runner.os }}-bins-${{github.workflow}}-${{github.sha}} diff --git a/.github/workflows/reusable-release.yaml b/.github/workflows/reusable-release.yaml index f518c5080aad..53869517256d 100644 --- a/.github/workflows/reusable-release.yaml +++ b/.github/workflows/reusable-release.yaml @@ -121,7 +121,7 @@ jobs: public.ecr.aws/aquasecurity/trivy:canary - name: Cache Trivy binaries - uses: actions/cache@v4.0.0 + uses: actions/cache@v4.0.2 with: path: dist/ # use 'github.sha' to create a unique cache folder for each run. diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index fb63908b3676..8b7477733b9b 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -57,7 +57,7 @@ jobs: if: ${{ failure() && steps.lint.conclusion == 'failure' }} - name: Install tools - uses: aquaproj/aqua-installer@v2.2.0 + uses: aquaproj/aqua-installer@v3.0.0 with: aqua_version: v1.25.0 aqua_opts: "" @@ -87,7 +87,7 @@ jobs: go-version-file: go.mod - name: Install tools - uses: aquaproj/aqua-installer@v2.2.0 + uses: aquaproj/aqua-installer@v3.0.0 with: aqua_version: v1.25.0 @@ -116,7 +116,7 @@ jobs: go-version-file: go.mod - name: Install tools - uses: aquaproj/aqua-installer@v2.2.0 + uses: aquaproj/aqua-installer@v3.0.0 with: aqua_version: v1.25.0 @@ -136,7 +136,7 @@ jobs: go-version-file: go.mod - name: Install tools - uses: aquaproj/aqua-installer@v2.2.0 + uses: aquaproj/aqua-installer@v3.0.0 with: aqua_version: v1.25.0 @@ -166,7 +166,7 @@ jobs: with: go-version-file: go.mod - name: Install tools - uses: aquaproj/aqua-installer@v2.2.0 + uses: aquaproj/aqua-installer@v3.0.0 with: aqua_version: v1.25.0 - name: Run vm integration tests diff --git a/contrib/gitlab.tpl b/contrib/gitlab.tpl index 187438776b72..744c0c9394cb 100644 --- a/contrib/gitlab.tpl +++ b/contrib/gitlab.tpl @@ -73,8 +73,11 @@ {{- /* TODO: Type not extractable - https://github.com/aquasecurity/trivy-db/pull/24 */}} "type": "cve", "name": "{{ .VulnerabilityID }}", - "value": "{{ .VulnerabilityID }}", + "value": "{{ .VulnerabilityID }}" + {{- /* cf. 
https://gitlab.com/gitlab-org/security-products/security-report-schemas/-/blob/e3d280d7f0862ca66a1555ea8b24016a004bb914/dist/container-scanning-report-format.json#L157-179 */}} + {{- if .PrimaryURL | regexMatch "^(https?|ftp)://.+" -}}, "url": "{{ .PrimaryURL }}" + {{- end }} } ], "links": [ @@ -85,9 +88,13 @@ {{- else -}} , {{- end -}} + {{- if . | regexMatch "^(https?|ftp)://.+" -}} { - "url": "{{ regexFind "[^ ]+" . }}" + "url": "{{ . }}" } + {{- else -}} + {{- $l_first = true }} + {{- end -}} {{- end }} ] } diff --git a/docs/docs/configuration/reporting.md b/docs/docs/configuration/reporting.md index 93468222e99b..117db88de866 100644 --- a/docs/docs/configuration/reporting.md +++ b/docs/docs/configuration/reporting.md @@ -63,6 +63,7 @@ The following languages are currently supported: | Go | [go.mod][go-mod] | | PHP | [composer.lock][composer-lock] | | Java | [pom.xml][pom-xml] | +| | [*gradle.lockfile][gradle-lockfile] | | Dart | [pubspec.lock][pubspec-lock] | This tree is the reverse of the dependency graph. @@ -445,5 +446,6 @@ $ trivy convert --format table --severity CRITICAL result.json [go-mod]: ../coverage/language/golang.md#go-modules [composer-lock]: ../coverage/language/php.md#composer [pom-xml]: ../coverage/language/java.md#pomxml +[gradle-lockfile]: ../coverage/language/java.md#gradlelock [pubspec-lock]: ../coverage/language/dart.md#dart [cargo-binaries]: ../coverage/language/rust.md#binaries \ No newline at end of file diff --git a/docs/docs/coverage/iac/index.md b/docs/docs/coverage/iac/index.md index 21f0209f2fad..168c3dd650fa 100644 --- a/docs/docs/coverage/iac/index.md +++ b/docs/docs/coverage/iac/index.md @@ -8,14 +8,15 @@ Trivy scans Infrastructure as Code (IaC) files for ## Supported configurations -| Config type | File patterns | -|-------------------------------------|-------------------------------| -| [Kubernetes](kubernetes.md) | *.yml, *.yaml, *.json | -| [Docker](docker.md) | Dockerfile, Containerfile | -| [Terraform](terraform.md) | *.tf, *.tf.json, *.tfvars, | -| [CloudFormation](cloudformation.md) | *.yml, *.yaml, *.json | -| [Azure ARM Template](azure-arm.md) | *.json | -| [Helm](helm.md) | *.yaml, *.tpl, *.tar.gz, etc. | +| Config type | File patterns | +|-------------------------------------|-----------------------------------------------| +| [Kubernetes](kubernetes.md) | \*.yml, \*.yaml, \*.json | +| [Docker](docker.md) | Dockerfile, Containerfile | +| [Terraform](terraform.md) | \*.tf, \*.tf.json, \*.tfvars | +| [Terraform Plan](terraform.md) | tfplan, \*.tfplan, \*.tfplan.json, \*.tf.json | +| [CloudFormation](cloudformation.md) | \*.yml, \*.yaml, \*.json | +| [Azure ARM Template](azure-arm.md) | \*.json | +| [Helm](helm.md) | \*.yaml, \*.tpl, \*.tar.gz, etc. 
| [misconf]: ../../scanner/misconfiguration/index.md [secret]: ../../scanner/secret.md diff --git a/docs/docs/coverage/language/java.md b/docs/docs/coverage/language/java.md index 59a9ba571506..e2e97b46c61f 100644 --- a/docs/docs/coverage/language/java.md +++ b/docs/docs/coverage/language/java.md @@ -3,11 +3,11 @@ Trivy supports three types of Java scanning: `JAR/WAR/PAR/EAR`, `pom.xml` and `* Each artifact supports the following scanners: -| Artifact | SBOM | Vulnerability | License | -| ---------------- | :---: | :-----------: | :-----: | -| JAR/WAR/PAR/EAR | ✓ | ✓ | - | -| pom.xml | ✓ | ✓ | ✓ | -| *gradle.lockfile | ✓ | ✓ | - | +| Artifact | SBOM | Vulnerability | License | +|------------------|:----:|:-------------:|:-------:| +| JAR/WAR/PAR/EAR | ✓ | ✓ | - | +| pom.xml | ✓ | ✓ | ✓ | +| *gradle.lockfile | ✓ | ✓ | ✓ | The following table provides an outline of the features Trivy offers. |------------------|:---------------------:|:----------------:|:------------------------------------:|:--------:| | JAR/WAR/PAR/EAR | Trivy Java DB | Include | - | - | | pom.xml | Maven repository [^1] | Exclude | ✓ | ✓[^7] | -| *gradle.lockfile | - | Exclude | - | ✓ | +| *gradle.lockfile | - | Exclude | ✓ | ✓ | These may be enabled or disabled depending on the target. See [here](./index.md) for the detail. @@ -64,11 +64,24 @@ If you need to show them, use the `--include-dev-deps` flag. ## Gradle.lock -`gradle.lock` files contain all necessary information about used dependencies. -Trivy simply parses the file, extract dependencies, and finds vulnerabilities for them. -It doesn't require the internet access. +`gradle.lock` files only contain the list of used dependencies. + +!!!note + All necessary files are checked locally. Gradle file scanning doesn't require internet access. + +### Dependency-tree +!!! warning "EXPERIMENTAL" + This feature might change without preserving backwards compatibility. +Trivy finds child dependencies from `*.pom` files in the cache[^8] directory. + +There is no reliable way to determine direct dependencies (even using other files). +Therefore, Trivy marks all dependencies as indirect and uses heuristics to guess the direct dependencies and build a dependency tree. + +### Licenses +Trivy can also detect licenses for dependencies. + +Make sure the cache[^8] directory is present so that licenses can be found in the dependencies' `*.pom` files. -[^1]: https://github.com/aquasecurity/trivy-java-db [^1]: Uses maven repository to get information about dependencies. Internet access required. [^2]: It means `*.jar`, `*.war`, `*.par` and `*.ear` file [^3]: `ArtifactID`, `GroupID` and `Version` @@ -76,6 +89,7 @@ It doesn't require the internet access. [^5]: When you use dependency path in `relativePath` field in pom.xml file [^6]: `/Users//.m2/repository` (for Linux and Mac) and `C:/Users//.m2/repository` (for Windows) by default [^7]: To avoid confusion, Trivy only finds locations for direct dependencies from the base pom.xml file. +[^8]: The supported directories are `$GRADLE_USER_HOME/caches` and `$HOME/.gradle/caches` (`%HOMEPATH%\.gradle\caches` for Windows). 
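As a rough illustration of the Gradle flow described in the Gradle.lock section above, a `gradle.lockfile` and a populated cache[^8] directory are typically produced along these lines before running Trivy. This is only a sketch under assumptions: it presumes dependency locking is already enabled in the project's `build.gradle(.kts)`, and the exact Gradle tasks vary by build setup; `trivy fs` and `--scanners vuln,license` are taken from the surrounding docs.

```bash
# Sketch: assumes dependency locking is enabled in build.gradle(.kts), e.g.
#   dependencyLocking { lockAllConfigurations() }

./gradlew dependencies --write-locks   # writes gradle.lockfile
./gradlew build                        # downloads dependencies, filling $HOME/.gradle/caches with *.pom files

# Trivy parses gradle.lockfile; when the cache directory is present it can also
# build the dependency tree and detect licenses from the cached *.pom files.
trivy fs --scanners vuln,license .
```

Without the cache directory, the lockfile alone is still enough for vulnerability detection; only the dependency tree and license detection described above rely on the cached `*.pom` files.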
[dependency-graph]: ../../configuration/reporting.md#show-origins-of-vulnerable-dependencies [maven-invoker-plugin]: https://maven.apache.org/plugins/maven-invoker-plugin/usage.html \ No newline at end of file diff --git a/docs/docs/references/configuration/cli/trivy.md b/docs/docs/references/configuration/cli/trivy.md index f11635a25992..f3c543a210f9 100644 --- a/docs/docs/references/configuration/cli/trivy.md +++ b/docs/docs/references/configuration/cli/trivy.md @@ -53,7 +53,7 @@ trivy [global flags] command [flags] target * [trivy plugin](trivy_plugin.md) - Manage plugins * [trivy repository](trivy_repository.md) - Scan a repository * [trivy rootfs](trivy_rootfs.md) - Scan rootfs -* [trivy sbom](trivy_sbom.md) - Scan SBOM for vulnerabilities +* [trivy sbom](trivy_sbom.md) - Scan SBOM for vulnerabilities and licenses * [trivy server](trivy_server.md) - Server mode * [trivy version](trivy_version.md) - Print the version * [trivy vm](trivy_vm.md) - [EXPERIMENTAL] Scan a virtual machine image diff --git a/docs/docs/references/configuration/cli/trivy_sbom.md b/docs/docs/references/configuration/cli/trivy_sbom.md index f30144c34e9d..5d941e9744ba 100644 --- a/docs/docs/references/configuration/cli/trivy_sbom.md +++ b/docs/docs/references/configuration/cli/trivy_sbom.md @@ -1,6 +1,6 @@ ## trivy sbom -Scan SBOM for vulnerabilities +Scan SBOM for vulnerabilities and licenses ``` trivy sbom [flags] SBOM_PATH @@ -36,6 +36,7 @@ trivy sbom [flags] SBOM_PATH --ignore-policy string specify the Rego file path to evaluate each vulnerability --ignore-status strings comma-separated list of vulnerability status to ignore (unknown,not_affected,affected,fixed,under_investigation,will_not_fix,fix_deferred,end_of_life) --ignore-unfixed display only fixed vulnerabilities + --ignored-licenses strings specify a list of license to ignore --ignorefile string specify .trivyignore file (default ".trivyignore") --java-db-repository string OCI repository to retrieve trivy-java-db from (default "ghcr.io/aquasecurity/trivy-java-db:1") --list-all-pkgs enabling the option will output all packages regardless of vulnerability @@ -50,6 +51,7 @@ trivy sbom [flags] SBOM_PATH --rekor-url string [EXPERIMENTAL] address of rekor STL server (default "https://rekor.sigstore.dev") --reset remove all caches and database --sbom-sources strings [EXPERIMENTAL] try to retrieve SBOM from the specified sources (oci,rekor) + --scanners strings comma-separated list of what security issues to detect (vuln,license) (default [vuln]) --server string server address in client mode -s, --severity strings severities of security issues to be displayed (UNKNOWN,LOW,MEDIUM,HIGH,CRITICAL) (default [UNKNOWN,LOW,MEDIUM,HIGH,CRITICAL]) --show-suppressed [EXPERIMENTAL] show suppressed vulnerabilities diff --git a/docs/docs/scanner/license.md b/docs/docs/scanner/license.md index dad487965481..7472011af671 100644 --- a/docs/docs/scanner/license.md +++ b/docs/docs/scanner/license.md @@ -22,18 +22,16 @@ Check out [the coverage document][coverage] for details. To enable extended license scanning, you can use `--license-full`. In addition to package licenses, Trivy scans source code files, Markdown documents, text files and `LICENSE` documents to identify license usage within the image or filesystem. -By default, Trivy only classifies licenses that are matched with a confidence level of 0.9 or more by the classifer. +By default, Trivy only classifies licenses that are matched with a confidence level of 0.9 or more by the classifier. 
To configure the confidence level, you can use `--license-confidence-level`. This makes it possible to classify licenses that the classifier matches with a lower confidence level. !!! note The full license scanning is expensive. It takes a while. -Currently, the standard license scanning doesn't support filesystem and repository scanning. - -| License scanning | Image | Rootfs | Filesystem | Repository | -| :-------------------: | :---: | :----: | :--------: | :--------: | -| Standard | ✅ | ✅ | - | - | -| Full (--license-full) | ✅ | ✅ | ✅ | ✅ | +| License scanning | Image | Rootfs | Filesystem | Repository | SBOM | +|:---------------------:|:-----:|:------:|:----------:|:----------:|:----:| +| Standard | ✅ | ✅ | ✅[^1][^2] | ✅[^1][^2] | ✅ | +| Full (--license-full) | ✅ | ✅ | ✅ | ✅ | - | License checking classifies the identified licenses and maps the classification to severity. @@ -344,6 +342,8 @@ license: permissive: [] ``` +[^1]: See the list of supported language files [here](../coverage/language/index.md). +[^2]: Some lock files require additional files (e.g. files from the cache directory) to detect licenses. Check [coverage][coverage] for more information. [coverage]: ../coverage/index.md [google-license-classification]: https://opensource.google/documentation/reference/thirdparty/licenses diff --git a/docs/docs/scanner/misconfiguration/index.md b/docs/docs/scanner/misconfiguration/index.md index b1107a530718..b243d3e8dc17 100644 --- a/docs/docs/scanner/misconfiguration/index.md +++ b/docs/docs/scanner/misconfiguration/index.md @@ -381,7 +381,7 @@ If multiple variables evaluate to the same hostname, Trivy will choose the environment ### Skipping resources by inline comments -Trivy supports ignoring misconfigured resources by inline comments for Terraform configuration files only. +Trivy supports ignoring misconfigured resources by inline comments for Terraform and CloudFormation configuration files only. In cases where Trivy can detect comments of a specific format immediately adjacent to resource definitions, it is possible to ignore findings from a single source of resource definition (in contrast to `.trivyignore`, which has a directory-wide scope on all of the files scanned). The format for these comments is `trivy:ignore:` immediately following the format-specific line-comment [token](https://developer.hashicorp.com/terraform/language/syntax/configuration#comments). @@ -422,6 +422,17 @@ As an example, consider the following check metadata: Long ID would look like the following: `aws-s3-enable-logging`. +Example for CloudFormation: +```yaml +AWSTemplateFormatVersion: "2010-09-09" +Resources: +#trivy:ignore:* + S3Bucket: + Type: 'AWS::S3::Bucket' + Properties: + BucketName: test-bucket +``` + #### Expiration Date You can specify the expiration date of the ignore rule in `yyyy-mm-dd` format. This is a useful feature when you want to make sure that an ignored issue is not forgotten and worth revisiting in the future. For example: @@ -494,8 +505,21 @@ resource "aws_security_group_rule" "example" { } ``` -!!! note - Currently nested attributes are not supported. For example you will not be able to reference the `each.key` attribute. +Checks can also be ignored by nested attributes, but certain restrictions apply: + +- You cannot access an individual block using indexes, for example when working with dynamic blocks. 
- Special variables like [each](https://developer.hashicorp.com/terraform/language/meta-arguments/for_each#the-each-object) and [count](https://developer.hashicorp.com/terraform/language/meta-arguments/count#the-count-object) cannot be accessed. + +```tf +#trivy:ignore:*[logging_config.prefix=myprefix] +resource "aws_cloudfront_distribution" "example" { + logging_config { + include_cookies = false + bucket = "mylogs.s3.amazonaws.com" + prefix = "myprefix" + } +} +``` #### Ignoring module issues @@ -523,4 +547,15 @@ module "s3_bucket" { bucket = each.value } ``` -[custom]: custom/index.md \ No newline at end of file + +#### Support for Wildcards + +You can use wildcards in the `ws` (workspace) and `ignore` sections of the ignore rules. + +```tf +# trivy:ignore:aws-s3-*:ws:dev-* +``` + +This example ignores all checks starting with `aws-s3-` for workspaces matching the pattern `dev-*`. + +[custom]: custom/index.md diff --git a/docs/docs/target/sbom.md b/docs/docs/target/sbom.md index a287455fc68a..4ea50035df1c 100644 --- a/docs/docs/target/sbom.md +++ b/docs/docs/target/sbom.md @@ -1,6 +1,6 @@ # SBOM scanning -Trivy can take the following SBOM formats as an input and scan for vulnerabilities. +Trivy can take the following SBOM formats as an input and scan for vulnerabilities and licenses. - CycloneDX - SPDX @@ -17,6 +17,9 @@ $ trivy sbom /path/to/sbom_file ``` +By default, only the vulnerability scan is run on an SBOM. You can use `--scanners vuln,license` +to also run the license scan, or `--scanners license` to run the license scan alone. + !!! note Passing SBOMs generated by tools other than Trivy may result in inaccurate detection because Trivy relies on custom properties in SBOM for accurate scanning. diff --git a/go.mod b/go.mod index ce787a0c2565..0e585260a019 100644 --- a/go.mod +++ b/go.mod @@ -4,8 +4,8 @@ go 1.21 require ( github.com/Azure/azure-sdk-for-go v68.0.0+incompatible - github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.0 - github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.4.0 + github.com/Azure/azure-sdk-for-go/sdk/azcore v1.10.0 + github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 github.com/BurntSushi/toml v1.3.2 github.com/CycloneDX/cyclonedx-go v0.8.0 github.com/GoogleCloudPlatform/docker-credential-gcr v2.0.5+incompatible @@ -26,37 +26,37 @@ require ( github.com/aquasecurity/trivy-java-db v0.0.0-20240109071736-184bd7481d48 github.com/aquasecurity/trivy-kubernetes v0.6.3 github.com/aquasecurity/trivy-policies v0.10.0 - github.com/aws/aws-sdk-go-v2 v1.25.2 - github.com/aws/aws-sdk-go-v2/config v1.27.4 - github.com/aws/aws-sdk-go-v2/credentials v1.17.4 + github.com/aws/aws-sdk-go-v2 v1.26.1 + github.com/aws/aws-sdk-go-v2/config v1.27.10 + github.com/aws/aws-sdk-go-v2/credentials v1.17.10 github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.15.15 - github.com/aws/aws-sdk-go-v2/service/ec2 v1.149.1 + github.com/aws/aws-sdk-go-v2/service/ec2 v1.155.1 github.com/aws/aws-sdk-go-v2/service/ecr v1.24.6 - github.com/aws/aws-sdk-go-v2/service/s3 v1.51.1 - github.com/aws/aws-sdk-go-v2/service/sts v1.28.1 + github.com/aws/aws-sdk-go-v2/service/s3 v1.53.1 + github.com/aws/aws-sdk-go-v2/service/sts v1.28.6 github.com/bitnami/go-version v0.0.0-20231130084017-bb00604d650c github.com/bmatcuk/doublestar/v4 v4.6.1 github.com/cenkalti/backoff v2.2.1+incompatible github.com/cheggaaa/pb/v3 v3.1.4 github.com/containerd/containerd v1.7.13 github.com/csaf-poc/csaf_distribution/v3 v3.0.0 - github.com/docker/docker v25.0.3+incompatible + github.com/docker/docker v25.0.5+incompatible 
github.com/docker/go-connections v0.5.0 github.com/fatih/color v1.16.0 github.com/go-git/go-git/v5 v5.11.0 github.com/go-openapi/runtime v0.27.1 - github.com/go-openapi/strfmt v0.22.0 + github.com/go-openapi/strfmt v0.23.0 github.com/go-redis/redis/v8 v8.11.5 github.com/golang-jwt/jwt v3.2.2+incompatible github.com/golang/protobuf v1.5.3 github.com/google/go-containerregistry v0.19.0 github.com/google/licenseclassifier/v2 v2.0.0 github.com/google/uuid v1.6.0 - github.com/google/wire v0.5.0 + github.com/google/wire v0.6.0 github.com/hashicorp/go-getter v1.7.3 github.com/hashicorp/go-multierror v1.1.1 github.com/hashicorp/go-retryablehttp v0.7.5 - github.com/hashicorp/golang-lru/v2 v2.0.6 + github.com/hashicorp/golang-lru/v2 v2.0.7 github.com/in-toto/in-toto-golang v0.9.0 github.com/knqyf263/go-apk-version v0.0.0-20200609155635-041fdbb8563f github.com/knqyf263/go-deb-version v0.0.0-20230223133812-3ed183d23422 @@ -96,14 +96,14 @@ require ( github.com/spf13/cobra v1.8.0 github.com/spf13/pflag v1.0.5 github.com/spf13/viper v1.18.2 - github.com/stretchr/testify v1.8.4 + github.com/stretchr/testify v1.9.0 github.com/testcontainers/testcontainers-go v0.28.0 - github.com/testcontainers/testcontainers-go/modules/localstack v0.26.0 - github.com/tetratelabs/wazero v1.6.0 + github.com/testcontainers/testcontainers-go/modules/localstack v0.28.0 + github.com/tetratelabs/wazero v1.7.0 github.com/twitchtv/twirp v8.1.2+incompatible github.com/xeipuuv/gojsonschema v1.2.0 github.com/xlab/treeprint v1.2.0 - go.etcd.io/bbolt v1.3.8 + go.etcd.io/bbolt v1.3.9 go.uber.org/zap v1.27.0 golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa golang.org/x/mod v0.15.0 @@ -112,7 +112,7 @@ require ( golang.org/x/term v0.17.0 golang.org/x/text v0.14.0 golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 - google.golang.org/protobuf v1.32.0 + google.golang.org/protobuf v1.33.0 gopkg.in/yaml.v3 v3.0.1 k8s.io/api v0.29.1 k8s.io/utils v0.0.0-20231127182322-b307cd553661 @@ -123,7 +123,7 @@ require ( github.com/alecthomas/chroma v0.10.0 github.com/antchfx/htmlquery v1.3.0 github.com/apparentlymart/go-cidr v1.1.0 - github.com/aws/smithy-go v1.20.1 + github.com/aws/smithy-go v1.20.2 github.com/hashicorp/go-uuid v1.0.3 github.com/hashicorp/go-version v1.6.0 github.com/hashicorp/hc-install v0.6.3 @@ -132,7 +132,6 @@ require ( github.com/liamg/iamgo v0.0.9 github.com/liamg/memoryfs v1.6.0 github.com/mitchellh/go-homedir v1.1.0 - github.com/olekukonko/tablewriter v0.0.5 github.com/owenrumney/squealer v1.2.2 github.com/zclconf/go-cty v1.14.1 github.com/zclconf/go-cty-yaml v1.0.3 @@ -149,7 +148,7 @@ require ( dario.cat/mergo v1.0.0 // indirect github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24 // indirect github.com/AdamKorcz/go-118-fuzz-build v0.0.0-20230306123547-8075edf89bb0 // indirect - github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.0 // indirect + github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2 // indirect github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 // indirect github.com/Azure/go-autorest v14.2.0+incompatible // indirect github.com/Azure/go-autorest/autorest v0.11.29 // indirect @@ -157,7 +156,7 @@ require ( github.com/Azure/go-autorest/autorest/date v0.3.0 // indirect github.com/Azure/go-autorest/logger v0.2.1 // indirect github.com/Azure/go-autorest/tracing v0.6.0 // indirect - github.com/AzureAD/microsoft-authentication-library-for-go v1.1.1 // indirect + github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 // indirect github.com/Intevation/gval v1.3.0 // 
indirect github.com/Intevation/jsonpath v0.2.1 // indirect github.com/MakeNowJust/heredoc v1.0.0 // indirect @@ -177,12 +176,12 @@ require ( github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect github.com/aws/aws-sdk-go v1.49.21 // indirect - github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.1 // indirect - github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.15.2 // indirect - github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.2 // indirect - github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.2 // indirect + github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.2 // indirect + github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.1 // indirect + github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.5 // indirect + github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.5 // indirect github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 // indirect - github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.2 // indirect + github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.5 // indirect github.com/aws/aws-sdk-go-v2/service/accessanalyzer v1.26.7 // indirect github.com/aws/aws-sdk-go-v2/service/apigateway v1.21.6 // indirect github.com/aws/aws-sdk-go-v2/service/apigatewayv2 v1.18.6 // indirect @@ -203,11 +202,11 @@ require ( github.com/aws/aws-sdk-go-v2/service/elasticsearchservice v1.25.0 // indirect github.com/aws/aws-sdk-go-v2/service/emr v1.36.0 // indirect github.com/aws/aws-sdk-go-v2/service/iam v1.28.7 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.1 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.2 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.7 // indirect github.com/aws/aws-sdk-go-v2/service/internal/endpoint-discovery v1.8.11 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.2 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.2 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.7 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.5 // indirect github.com/aws/aws-sdk-go-v2/service/kafka v1.28.5 // indirect github.com/aws/aws-sdk-go-v2/service/kinesis v1.24.6 // indirect github.com/aws/aws-sdk-go-v2/service/kms v1.27.7 // indirect @@ -219,8 +218,8 @@ require ( github.com/aws/aws-sdk-go-v2/service/secretsmanager v1.26.0 // indirect github.com/aws/aws-sdk-go-v2/service/sns v1.26.6 // indirect github.com/aws/aws-sdk-go-v2/service/sqs v1.29.6 // indirect - github.com/aws/aws-sdk-go-v2/service/sso v1.20.1 // indirect - github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.1 // indirect + github.com/aws/aws-sdk-go-v2/service/sso v1.20.4 // indirect + github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.4 // indirect github.com/aws/aws-sdk-go-v2/service/workspaces v1.38.1 // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d // indirect @@ -264,8 +263,9 @@ require ( github.com/go-ini/ini v1.67.0 // indirect github.com/go-logr/logr v1.4.1 // indirect github.com/go-logr/stdr v1.2.2 // indirect + github.com/go-ole/go-ole v1.2.6 // indirect github.com/go-openapi/analysis v0.21.5 // indirect - github.com/go-openapi/errors v0.21.0 // indirect + github.com/go-openapi/errors v0.22.0 // indirect github.com/go-openapi/jsonpointer v0.20.1 // indirect 
github.com/go-openapi/jsonreference v0.20.3 // indirect github.com/go-openapi/loads v0.21.3 // indirect @@ -279,7 +279,7 @@ require ( github.com/gofrs/uuid v4.3.1+incompatible // indirect github.com/gogo/protobuf v1.3.2 // indirect github.com/golang-jwt/jwt/v4 v4.5.0 // indirect - github.com/golang-jwt/jwt/v5 v5.0.0 // indirect + github.com/golang-jwt/jwt/v5 v5.2.0 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/google/btree v1.1.2 // indirect github.com/google/gnostic-models v0.6.8 // indirect @@ -315,6 +315,7 @@ require ( github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 // indirect github.com/lib/pq v1.10.9 // indirect github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de // indirect + github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect github.com/lunixbochs/struc v0.0.0-20200707160740-784aaebc1d40 // indirect github.com/magiconair/properties v1.8.7 // indirect github.com/mattn/go-colorable v0.1.13 // indirect @@ -345,9 +346,10 @@ require ( github.com/pelletier/go-toml/v2 v2.1.0 // indirect github.com/peterbourgon/diskv v2.0.1+incompatible // indirect github.com/pjbgf/sha1cd v0.3.0 // indirect - github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 // indirect + github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect + github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect github.com/prometheus/client_golang v1.19.0 // indirect github.com/prometheus/client_model v0.5.0 // indirect github.com/prometheus/common v0.48.0 // indirect @@ -362,13 +364,17 @@ require ( github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 // indirect github.com/sergi/go-diff v1.3.1 // indirect github.com/shibumi/go-pathspec v1.3.0 // indirect + github.com/shirou/gopsutil/v3 v3.23.12 // indirect + github.com/shoenig/go-m1cpu v0.1.6 // indirect github.com/shopspring/decimal v1.3.1 // indirect github.com/skeema/knownhosts v1.2.1 // indirect github.com/sourcegraph/conc v0.3.0 // indirect github.com/spf13/afero v1.11.0 // indirect - github.com/stretchr/objx v0.5.0 // indirect + github.com/stretchr/objx v0.5.2 // indirect github.com/subosito/gotenv v1.6.0 // indirect github.com/tchap/go-patricia/v2 v2.3.1 // indirect + github.com/tklauser/go-sysconf v0.3.12 // indirect + github.com/tklauser/numcpus v0.6.1 // indirect github.com/ulikunitz/xz v0.5.11 // indirect github.com/vbatts/tar-split v0.11.3 // indirect github.com/xanzy/ssh-agent v0.3.3 // indirect @@ -376,7 +382,8 @@ require ( github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect github.com/yashtewari/glob-intersection v0.2.0 // indirect github.com/yuin/gopher-lua v1.1.0 // indirect - go.mongodb.org/mongo-driver v1.13.1 // indirect + github.com/yusufpapurcu/wmi v1.2.3 // indirect + go.mongodb.org/mongo-driver v1.14.0 // indirect go.opencensus.io v0.24.0 // indirect go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.46.1 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.46.1 // indirect @@ -390,7 +397,7 @@ require ( golang.org/x/oauth2 v0.16.0 // indirect golang.org/x/sys v0.17.0 // indirect golang.org/x/time v0.5.0 // indirect - golang.org/x/tools v0.16.1 // indirect + golang.org/x/tools v0.17.0 // indirect google.golang.org/api v0.155.0 // indirect google.golang.org/appengine v1.6.8 // indirect google.golang.org/genproto 
v0.0.0-20240123012728-ef4313101c80 // indirect @@ -427,7 +434,3 @@ require ( sigs.k8s.io/structured-merge-diff/v4 v4.4.1 // indirect sigs.k8s.io/yaml v1.4.0 // indirect ) - -// testcontainers-go has a bug with versions v0.25.0 and v0.26.0 -// ref: https://github.com/testcontainers/testcontainers-go/issues/1782 -replace github.com/testcontainers/testcontainers-go => github.com/testcontainers/testcontainers-go v0.23.0 diff --git a/go.sum b/go.sum index 6319723f6b29..1f149fc1b692 100644 --- a/go.sum +++ b/go.sum @@ -196,12 +196,12 @@ github.com/AdamKorcz/go-118-fuzz-build v0.0.0-20230306123547-8075edf89bb0/go.mod github.com/Azure/azure-sdk-for-go v16.2.1+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= github.com/Azure/azure-sdk-for-go v68.0.0+incompatible h1:fcYLmCpyNYRnvJbPerq7U0hS+6+I79yEDJBqVNcqUzU= github.com/Azure/azure-sdk-for-go v68.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= -github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.0 h1:fb8kj/Dh4CSwgsOzHeZY4Xh68cFVbzXx+ONXGMY//4w= -github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.0/go.mod h1:uReU2sSxZExRPBAg3qKzmAucSi51+SP1OhohieR821Q= -github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.4.0 h1:BMAjVKJM0U/CYF27gA0ZMmXGkOcvfFtD0oHVZ1TIPRI= -github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.4.0/go.mod h1:1fXstnBMas5kzG+S3q8UoJcmyU6nUeunJcMDHcRYHhs= -github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.0 h1:d81/ng9rET2YqdVkVwkb6EXeRrLJIwyGnJcAlAWKwhs= -github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.0/go.mod h1:s4kgfzA0covAXNicZHDMN58jExvcng2mC/DepXiF1EI= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.10.0 h1:n1DH8TPV4qqPTje2RcUBYwtrTWlabVp4n46+74X2pn4= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.10.0/go.mod h1:HDcZnuGbiyppErN6lB+idp4CKhjbc8gwjto6OPpyggM= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 h1:sO0/P7g68FrryJzljemN+6GTssUXdANk6aJ7T1ZxnsQ= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1/go.mod h1:h8hyGFDsU5HMivxiS2iYFZsgDbU9OnnJ163x5UGVKYo= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2 h1:LqbJ/WzJUwBf8UiaSzgX7aMclParm9/5Vgp+TY51uBQ= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2/go.mod h1:yInRyqWXAuaPrgI7p70+lDDgh3mlBohis29jGMISnmc= github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8= github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8= github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= @@ -227,8 +227,8 @@ github.com/Azure/go-autorest/logger v0.2.1 h1:IG7i4p/mDa2Ce4TRyAO8IHnVhAVF3RFU+Z github.com/Azure/go-autorest/logger v0.2.1/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8= github.com/Azure/go-autorest/tracing v0.6.0 h1:TYi4+3m5t6K48TGI9AUdb+IzbnSxvnvUMfuitfgcfuo= github.com/Azure/go-autorest/tracing v0.6.0/go.mod h1:+vhtPC754Xsa23ID7GlGsrdKBpUA79WCAKPPZVC2DeU= -github.com/AzureAD/microsoft-authentication-library-for-go v1.1.1 h1:WpB/QDNLpMw72xHJc34BNNykqSOeEJDAWkhf0u12/Jk= -github.com/AzureAD/microsoft-authentication-library-for-go v1.1.1/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= +github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 h1:DzHpqpoJVaCgOUdVHxE8QB52S6NiVdDQvGlny1qvPqA= +github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/toml 
v1.2.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= github.com/BurntSushi/toml v1.3.2 h1:o7IhLm0Msx3BaB+n3Ag7L8EVlByGnpq14C4YWiu/gL8= @@ -368,26 +368,26 @@ github.com/aws/aws-sdk-go v1.15.11/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZo github.com/aws/aws-sdk-go v1.44.122/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo= github.com/aws/aws-sdk-go v1.49.21 h1:Rl8KW6HqkwzhATwvXhyr7vD4JFUMi7oXGAw9SrxxIFY= github.com/aws/aws-sdk-go v1.49.21/go.mod h1:LF8svs817+Nz+DmiMQKTO3ubZ/6IaTpq3TjupRn3Eqk= -github.com/aws/aws-sdk-go-v2 v1.25.2 h1:/uiG1avJRgLGiQM9X3qJM8+Qa6KRGK5rRPuXE0HUM+w= -github.com/aws/aws-sdk-go-v2 v1.25.2/go.mod h1:Evoc5AsmtveRt1komDwIsjHFyrP5tDuF1D1U+6z6pNo= -github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.1 h1:gTK2uhtAPtFcdRRJilZPx8uJLL2J85xK11nKtWL0wfU= -github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.1/go.mod h1:sxpLb+nZk7tIfCWChfd+h4QwHNUR57d8hA1cleTkjJo= -github.com/aws/aws-sdk-go-v2/config v1.27.4 h1:AhfWb5ZwimdsYTgP7Od8E9L1u4sKmDW2ZVeLcf2O42M= -github.com/aws/aws-sdk-go-v2/config v1.27.4/go.mod h1:zq2FFXK3A416kiukwpsd+rD4ny6JC7QSkp4QdN1Mp2g= -github.com/aws/aws-sdk-go-v2/credentials v1.17.4 h1:h5Vztbd8qLppiPwX+y0Q6WiwMZgpd9keKe2EAENgAuI= -github.com/aws/aws-sdk-go-v2/credentials v1.17.4/go.mod h1:+30tpwrkOgvkJL1rUZuRLoxcJwtI/OkeBLYnHxJtVe0= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.15.2 h1:AK0J8iYBFeUk2Ax7O8YpLtFsfhdOByh2QIkHmigpRYk= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.15.2/go.mod h1:iRlGzMix0SExQEviAyptRWRGdYNo3+ufW/lCzvKVTUc= +github.com/aws/aws-sdk-go-v2 v1.26.1 h1:5554eUqIYVWpU0YmeeYZ0wU64H2VLBs8TlhRB2L+EkA= +github.com/aws/aws-sdk-go-v2 v1.26.1/go.mod h1:ffIFB97e2yNsv4aTSGkqtHnppsIJzw7G7BReUZ3jCXM= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.2 h1:x6xsQXGSmW6frevwDA+vi/wqhp1ct18mVXYN08/93to= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.2/go.mod h1:lPprDr1e6cJdyYeGXnRaJoP4Md+cDBvi2eOj00BlGmg= +github.com/aws/aws-sdk-go-v2/config v1.27.10 h1:PS+65jThT0T/snC5WjyfHHyUgG+eBoupSDV+f838cro= +github.com/aws/aws-sdk-go-v2/config v1.27.10/go.mod h1:BePM7Vo4OBpHreKRUMuDXX+/+JWP38FLkzl5m27/Jjs= +github.com/aws/aws-sdk-go-v2/credentials v1.17.10 h1:qDZ3EA2lv1KangvQB6y258OssCHD0xvaGiEDkG4X/10= +github.com/aws/aws-sdk-go-v2/credentials v1.17.10/go.mod h1:6t3sucOaYDwDssHQa0ojH1RpmVmF5/jArkye1b2FKMI= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.1 h1:FVJ0r5XTHSmIHJV6KuDmdYhEpvlHpiSd38RQWhut5J4= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.1/go.mod h1:zusuAeqezXzAB24LGuzuekqMAEgWkVYukBec3kr3jUg= github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.15.15 h1:2MUXyGW6dVaQz6aqycpbdLIH1NMcUI6kW6vQ0RabGYg= github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.15.15/go.mod h1:aHbhbR6WEQgHAiRj41EQ2W47yOYwNtIkWTXmcAtYqj8= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.2 h1:bNo4LagzUKbjdxE0tIcR9pMzLR2U/Tgie1Hq1HQ3iH8= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.2/go.mod h1:wRQv0nN6v9wDXuWThpovGQjqF1HFdcgWjporw14lS8k= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.2 h1:EtOU5jsPdIQNP+6Q2C5e3d65NKT1PeCiQk+9OdzO12Q= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.2/go.mod h1:tyF5sKccmDz0Bv4NrstEr+/9YkSPJHrcO7UsUKf7pWM= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.5 h1:aw39xVGeRWlWx9EzGVnhOR4yOjQDHPQ6o6NmBlscyQg= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.5/go.mod h1:FSaRudD0dXiMPK2UjknVwwTYyZMRsHv3TtkabsZih5I= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.5 h1:PG1F3OD1szkuQPzDw3CIQsRIrtTlUC3lP84taWzHlq0= 
+github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.5/go.mod h1:jU1li6RFryMz+so64PpKtudI+QzbKoIEivqdf6LNpOc= github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 h1:hT8rVHwugYE2lEfdFE0QWVo81lF7jMrYJVDWI+f+VxU= github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0/go.mod h1:8tu/lYfQfFe6IGnaOdrpVgEL2IrrDOf6/m9RQum4NkY= -github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.2 h1:en92G0Z7xlksoOylkUhuBSfJgijC7rHVLRdnIlHEs0E= -github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.2/go.mod h1:HgtQ/wN5G+8QSlK62lbOtNwQ3wTSByJ4wH2rCkPt+AE= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.5 h1:81KE7vaZzrl7yHBYHVEzYB8sypz11NMOZ40YlWvPxsU= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.5/go.mod h1:LIt2rg7Mcgn09Ygbdh/RdIm0rQ+3BNkbP1gyVMFtRK0= github.com/aws/aws-sdk-go-v2/service/accessanalyzer v1.26.7 h1:rLdKcienXrk+JFX1+DZg160ebG8lIF2nFvnEZL7dnII= github.com/aws/aws-sdk-go-v2/service/accessanalyzer v1.26.7/go.mod h1:cwqaWBOZXu8pqEE1ZC4Sw2ycZLjwKrRP5tOAJFgCbYc= github.com/aws/aws-sdk-go-v2/service/apigateway v1.21.6 h1:ePPaOVn92r5n8Neecdpy93hDmR0PBH6H6b7VQCE5vKE= @@ -412,8 +412,8 @@ github.com/aws/aws-sdk-go-v2/service/dynamodb v1.26.8 h1:XKO0BswTDeZMLDBd/b5pCEZ github.com/aws/aws-sdk-go-v2/service/dynamodb v1.26.8/go.mod h1:N5tqZcYMM0N1PN7UQYJNWuGyO886OfnMhf/3MAbqMcI= github.com/aws/aws-sdk-go-v2/service/ebs v1.21.7 h1:CRzzXjmgx9p362yO39D6hbZULdMI23gaKqSxijJCXHM= github.com/aws/aws-sdk-go-v2/service/ebs v1.21.7/go.mod h1:wnsHqpi3RgDwklS5SPHUgjcUUpontGPKJ+GJYOdV7pY= -github.com/aws/aws-sdk-go-v2/service/ec2 v1.149.1 h1:OGZUMBYZnz+R5nkW6FS1J8UlfLeM/pKojck+74+ZQGY= -github.com/aws/aws-sdk-go-v2/service/ec2 v1.149.1/go.mod h1:XxJNg7fIkR8cbm89i0zVZSxKpcPYsC8BWRwMIJOWbnk= +github.com/aws/aws-sdk-go-v2/service/ec2 v1.155.1 h1:JBwnHlQvL39eeT03+vmBZuziutTKljmOKboKxQuIBck= +github.com/aws/aws-sdk-go-v2/service/ec2 v1.155.1/go.mod h1:xejKuuRDjz6z5OqyeLsz01MlOqqW7CqpAB4PabNvpu8= github.com/aws/aws-sdk-go-v2/service/ecr v1.24.6 h1:cT7h+GWP2k0hJSsPmppKgxl4C9R6gCC5/oF4oHnmpK4= github.com/aws/aws-sdk-go-v2/service/ecr v1.24.6/go.mod h1:AOHmGMoPtSY9Zm2zBuwUJQBisIvYAZeA1n7b6f4e880= github.com/aws/aws-sdk-go-v2/service/ecs v1.35.6 h1:Sc2mLjyA1R8z2l705AN7Wr7QOlnUxVnGPJeDIVyUSrs= @@ -432,16 +432,16 @@ github.com/aws/aws-sdk-go-v2/service/emr v1.36.0 h1:FdeZ7AYOvyL09KH250Ncz4LF4SB1 github.com/aws/aws-sdk-go-v2/service/emr v1.36.0/go.mod h1:Drh6y2qLaw/wnDKTIcdqM2m358MIRXsZ2Bj2tjhVLq0= github.com/aws/aws-sdk-go-v2/service/iam v1.28.7 h1:FKPRDYZOO0Eur19vWUL1B40Op0j89KQj3kARjrszMK8= github.com/aws/aws-sdk-go-v2/service/iam v1.28.7/go.mod h1:YzMYyQ7S4twfYzLjwP24G1RAxypozVZeNaG1r2jxRms= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.1 h1:EyBZibRTVAs6ECHZOw5/wlylS9OcTzwyjeQMudmREjE= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.1/go.mod h1:JKpmtYhhPs7D97NL/ltqz7yCkERFW5dOlHyVl66ZYF8= -github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.2 h1:zSdTXYLwuXDNPUS+V41i1SFDXG7V0ITp0D9UT9Cvl18= -github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.2/go.mod h1:v8m8k+qVy95nYi7d56uP1QImleIIY25BPiNJYzPBdFE= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2 h1:Ji0DY1xUsUr3I8cHps0G+XM3WWU16lP6yG8qu1GAZAs= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2/go.mod h1:5CsjAbs3NlGQyZNFACh+zztPDI7fU6eW9QsxjfnuBKg= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.7 h1:ZMeFZ5yk+Ek+jNr1+uwCd2tG89t6oTS5yVWpa6yy2es= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.7/go.mod h1:mxV05U+4JiHqIpGqqYXOHLPKUC6bDXC44bsUhNjOEwY= 
github.com/aws/aws-sdk-go-v2/service/internal/endpoint-discovery v1.8.11 h1:e9AVb17H4x5FTE5KWIP5M1Du+9M86pS+Hw0lBUdN8EY= github.com/aws/aws-sdk-go-v2/service/internal/endpoint-discovery v1.8.11/go.mod h1:B90ZQJa36xo0ph9HsoteI1+r8owgQH/U1QNfqZQkj1Q= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.2 h1:5ffmXjPtwRExp1zc7gENLgCPyHFbhEPwVTkTiH9niSk= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.2/go.mod h1:Ru7vg1iQ7cR4i7SZ/JTLYN9kaXtbL69UdgG0OQWQxW0= -github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.2 h1:1oY1AVEisRI4HNuFoLdRUB0hC63ylDAN6Me3MrfclEg= -github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.2/go.mod h1:KZ03VgvZwSjkT7fOetQ/wF3MZUvYFirlI1H5NklUNsY= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.7 h1:ogRAwT1/gxJBcSWDMZlgyFUM962F51A5CRhDLbxLdmo= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.7/go.mod h1:YCsIZhXfRPLFFCl5xxY+1T9RKzOKjCut+28JSX2DnAk= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.5 h1:f9RyWNtS8oH7cZlbn+/JNPpjUk5+5fLd5lM9M0i49Ys= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.5/go.mod h1:h5CoMZV2VF297/VLhRhO1WF+XYWOzXo+4HsObA4HjBQ= github.com/aws/aws-sdk-go-v2/service/kafka v1.28.5 h1:yCkyZDGahaCaAkdpVx8Te05t6eW2FarBLunVC8S23nU= github.com/aws/aws-sdk-go-v2/service/kafka v1.28.5/go.mod h1:/KmX+vXMPJGAB56reo95tnsXa6QPNx6qli4L1AmYb7E= github.com/aws/aws-sdk-go-v2/service/kinesis v1.24.6 h1:FO/aIHk86VePDUh/3Q/A5pnvu45miO1GZB8rIq2BUlA= @@ -458,24 +458,24 @@ github.com/aws/aws-sdk-go-v2/service/rds v1.66.1 h1:TafjIpDW/+l7s+f3EIONaFsNvNfw github.com/aws/aws-sdk-go-v2/service/rds v1.66.1/go.mod h1:MYzRMSdY70kcS8AFg0aHmk/xj6VAe0UfaCCoLrBWPow= github.com/aws/aws-sdk-go-v2/service/redshift v1.39.7 h1:k4WaqQ7LHSGrSftCRXTRLv7WaozXu+fZ1jdisQSR2eU= github.com/aws/aws-sdk-go-v2/service/redshift v1.39.7/go.mod h1:8hU0Ax6q6QA+jrMcWTE0A4YH594MQoWP3EzGO3GH5Dw= -github.com/aws/aws-sdk-go-v2/service/s3 v1.51.1 h1:juZ+uGargZOrQGNxkVHr9HHR/0N+Yu8uekQnV7EAVRs= -github.com/aws/aws-sdk-go-v2/service/s3 v1.51.1/go.mod h1:SoR0c7Jnq8Tpmt0KSLXIavhjmaagRqQpe9r70W3POJg= +github.com/aws/aws-sdk-go-v2/service/s3 v1.53.1 h1:6cnno47Me9bRykw9AEv9zkXE+5or7jz8TsskTTccbgc= +github.com/aws/aws-sdk-go-v2/service/s3 v1.53.1/go.mod h1:qmdkIIAC+GCLASF7R2whgNrJADz0QZPX+Seiw/i4S3o= github.com/aws/aws-sdk-go-v2/service/secretsmanager v1.26.0 h1:dPCRgAL4WD9tSMaDglRNGOiAtSTjkwNiUW5GDpWFfHA= github.com/aws/aws-sdk-go-v2/service/secretsmanager v1.26.0/go.mod h1:4Ae1NCLK6ghmjzd45Tc33GgCKhUWD2ORAlULtMO1Cbs= github.com/aws/aws-sdk-go-v2/service/sns v1.26.6 h1:w2YwF8889ardGU3Y0qZbJ4Zzh+Q/QqKZ4kwkK7JFvnI= github.com/aws/aws-sdk-go-v2/service/sns v1.26.6/go.mod h1:IrcbquqMupzndZ20BXxDxjM7XenTRhbwBOetk4+Z5oc= github.com/aws/aws-sdk-go-v2/service/sqs v1.29.6 h1:UdbDTllc7cmusTTMy1dcTrYKRl4utDEsmKh9ZjvhJCc= github.com/aws/aws-sdk-go-v2/service/sqs v1.29.6/go.mod h1:mCUv04gd/7g+/HNzDB4X6dzJuygji0ckvB3Lg/TdG5Y= -github.com/aws/aws-sdk-go-v2/service/sso v1.20.1 h1:utEGkfdQ4L6YW/ietH7111ZYglLJvS+sLriHJ1NBJEQ= -github.com/aws/aws-sdk-go-v2/service/sso v1.20.1/go.mod h1:RsYqzYr2F2oPDdpy+PdhephuZxTfjHQe7SOBcZGoAU8= -github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.1 h1:9/GylMS45hGGFCcMrUZDVayQE1jYSIN6da9jo7RAYIw= -github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.1/go.mod h1:YjAPFn4kGFqKC54VsHs5fn5B6d+PCY2tziEa3U/GB5Y= -github.com/aws/aws-sdk-go-v2/service/sts v1.28.1 h1:3I2cBEYgKhrWlwyZgfpSO2BpaMY1LHPqXYk/QGlu2ew= -github.com/aws/aws-sdk-go-v2/service/sts v1.28.1/go.mod 
h1:uQ7YYKZt3adCRrdCBREm1CD3efFLOUNH77MrUCvx5oA= +github.com/aws/aws-sdk-go-v2/service/sso v1.20.4 h1:WzFol5Cd+yDxPAdnzTA5LmpHYSWinhmSj4rQChV0ee8= +github.com/aws/aws-sdk-go-v2/service/sso v1.20.4/go.mod h1:qGzynb/msuZIE8I75DVRCUXw3o3ZyBmUvMwQ2t/BrGM= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.4 h1:Jux+gDDyi1Lruk+KHF91tK2KCuY61kzoCpvtvJJBtOE= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.4/go.mod h1:mUYPBhaF2lGiukDEjJX2BLRRKTmoUSitGDUgM4tRxak= +github.com/aws/aws-sdk-go-v2/service/sts v1.28.6 h1:cwIxeBttqPN3qkaAjcEcsh8NYr8n2HZPkcKgPAi1phU= +github.com/aws/aws-sdk-go-v2/service/sts v1.28.6/go.mod h1:FZf1/nKNEkHdGGJP/cI2MoIMquumuRK6ol3QQJNDxmw= github.com/aws/aws-sdk-go-v2/service/workspaces v1.38.1 h1:pqxn3fcZDgWmo8GMUjlxVBdakcGo0AeUb7mjX33pJIQ= github.com/aws/aws-sdk-go-v2/service/workspaces v1.38.1/go.mod h1:kP5rUlnqfno/obflnKX4KMBWkoVHLDI8oCka9U0opRo= -github.com/aws/smithy-go v1.20.1 h1:4SZlSlMr36UEqC7XOyRVb27XMeZubNcBNN+9IgEPIQw= -github.com/aws/smithy-go v1.20.1/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E= +github.com/aws/smithy-go v1.20.2 h1:tbp628ireGtzcHDDmLT/6ADHidqnwgF57XOXZe6tp4Q= +github.com/aws/smithy-go v1.20.2/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E= github.com/beorn7/perks v0.0.0-20160804104726-4c0e84591b9a/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= @@ -717,8 +717,8 @@ github.com/docker/distribution v2.8.3+incompatible h1:AtKxIZ36LoNK51+Z6RpzLpddBi github.com/docker/distribution v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= github.com/docker/docker v20.10.7+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/docker/docker v23.0.0-rc.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= -github.com/docker/docker v25.0.3+incompatible h1:D5fy/lYmY7bvZa0XTZ5/UJPljor41F+vdyJG5luQLfQ= -github.com/docker/docker v25.0.3+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/docker v25.0.5+incompatible h1:UmQydMduGkrD5nQde1mecF/YnSbTOaPeFIeP5C4W+DE= +github.com/docker/docker v25.0.5+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/docker/docker-credential-helpers v0.6.3/go.mod h1:WRaJzqw3CTB9bk10avuGsjVBZsD05qeibJ1/TYlvc0Y= github.com/docker/docker-credential-helpers v0.7.0 h1:xtCHsjxogADNZcdv1pKUHXryefjlVRqWqIhk/uXJp0A= github.com/docker/docker-credential-helpers v0.7.0/go.mod h1:rETQfLdHNT3foU5kuNkFR1R1V12OJRRO5lzt2D1b5X0= @@ -827,10 +827,12 @@ github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ= github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= +github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= github.com/go-openapi/analysis v0.21.5 h1:3tHfEBh6Ia8eKc4M7khOGjPOAlWKJ10d877Cr9teujI= github.com/go-openapi/analysis v0.21.5/go.mod h1:25YcZosX9Lwz2wBsrFrrsL8bmjjXdlyP6zsr2AMy29M= -github.com/go-openapi/errors v0.21.0 h1:FhChC/duCnfoLj1gZ0BgaBmzhJC2SL/sJr8a2vAobSY= -github.com/go-openapi/errors v0.21.0/go.mod h1:jxNTMUxRCKj65yb/okJGEtahVd7uvWnuWfj53bse4ho= 
+github.com/go-openapi/errors v0.22.0 h1:c4xY/OLxUBSTiepAg3j/MHuAv5mJhnf53LLMWFB+u/w= +github.com/go-openapi/errors v0.22.0/go.mod h1:J3DmZScxCDufmIMsdOuDHxJbdOGC0xtUynjIx092vXE= github.com/go-openapi/jsonpointer v0.19.2/go.mod h1:3akKfEdA7DF1sugOqz1dVQHBcuDBPKZGEoHC/NkiQRg= github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= github.com/go-openapi/jsonpointer v0.20.1 h1:MkK4VEIEZMj4wT9PmjaUmGflVBr9nvud4Q4UVFbDoBE= @@ -846,8 +848,8 @@ github.com/go-openapi/runtime v0.27.1/go.mod h1:fijeJEiEclyS8BRurYE1DE5TLb9/KZl6 github.com/go-openapi/spec v0.19.3/go.mod h1:FpwSN1ksY1eteniUU7X0N/BgJ7a4WvBFVA8Lj9mJglo= github.com/go-openapi/spec v0.20.12 h1:cgSLbrsmziAP2iais+Vz7kSazwZ8rsUZd6TUzdDgkVI= github.com/go-openapi/spec v0.20.12/go.mod h1:iSCgnBcwbMW9SfzJb8iYynXvcY6C/QFrI7otzF7xGM4= -github.com/go-openapi/strfmt v0.22.0 h1:Ew9PnEYc246TwrEspvBdDHS4BVKXy/AOVsfqGDgAcaI= -github.com/go-openapi/strfmt v0.22.0/go.mod h1:HzJ9kokGIju3/K6ap8jL+OlGAbjpSv27135Yr9OivU4= +github.com/go-openapi/strfmt v0.23.0 h1:nlUS6BCqcnAk0pyhi9Y+kdDVZdZMHfEKQiS4HaMgO/c= +github.com/go-openapi/strfmt v0.23.0/go.mod h1:NrtIpfKtWIygRkKVsxh7XQMDQW5HKQl6S5ik2elW+K4= github.com/go-openapi/swag v0.19.2/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= github.com/go-openapi/swag v0.22.5 h1:fVS63IE3M0lsuWRzuom3RLwUMVI2peDH01s6M70ugys= @@ -907,8 +909,8 @@ github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzq github.com/golang-jwt/jwt/v4 v4.0.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg= github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= -github.com/golang-jwt/jwt/v5 v5.0.0 h1:1n1XNM9hk7O9mnQoNBGolZvzebBQ7p93ULHRc28XJUE= -github.com/golang-jwt/jwt/v5 v5.0.0/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= +github.com/golang-jwt/jwt/v5 v5.2.0 h1:d/ix8ftRUorsN+5eMIlF4T6J8CAt9rch3My2winC1Jw= +github.com/golang-jwt/jwt/v5 v5.2.0/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/glog v1.2.0 h1:uCdmnmatrKCgMBlM4rMuJZWOkPDqdbZPnrMXDY4gI68= github.com/golang/glog v1.2.0/go.mod h1:6AhwSGph0fcJtXVM/PEHPqZlFeoLxhs7/t5UDAwmO+w= @@ -948,7 +950,6 @@ github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= @@ -1016,7 +1017,7 @@ github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o= github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw= github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4= github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod 
h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ= -github.com/google/subcommands v1.0.1/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk= +github.com/google/subcommands v1.2.0/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk= github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= @@ -1024,8 +1025,8 @@ github.com/google/uuid v1.2.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+ github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/wire v0.5.0 h1:I7ELFeVBr3yfPIcc8+MWvrjk+3VjbcSzoXm3JVa+jD8= -github.com/google/wire v0.5.0/go.mod h1:ngWDr9Qvq3yZA10YrxfyGELY/AFWGVpy9c1LTRi1EoU= +github.com/google/wire v0.6.0 h1:HBkoIh4BdSxoyo9PveV8giw7ZsaBOvzWKfcg/6MrVwI= +github.com/google/wire v0.6.0/go.mod h1:F4QhpQ9EDIdJ1Mbop/NZBRB+5yrR6qg3BnctaoUk6NA= github.com/googleapis/enterprise-certificate-proxy v0.0.0-20220520183353-fd19c99a87aa/go.mod h1:17drOmN3MwGY7t0e+Ei9b45FFGA3fBs3x36SsCg1hq8= github.com/googleapis/enterprise-certificate-proxy v0.1.0/go.mod h1:17drOmN3MwGY7t0e+Ei9b45FFGA3fBs3x36SsCg1hq8= github.com/googleapis/enterprise-certificate-proxy v0.2.0/go.mod h1:8C0jb7/mgJe/9KK8Lm7X9ctZC2t60YyIpYEI16jx0Qg= @@ -1110,8 +1111,8 @@ github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.6.0 h1:uL2shRDx7RTrOrTCUZEGP/wJUFiUI8QT6E7z5o8jga4= github.com/hashicorp/golang-lru v0.6.0/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= -github.com/hashicorp/golang-lru/v2 v2.0.6 h1:3xi/Cafd1NaoEnS/yDssIiuVeDVywU0QdFGl3aQaQHM= -github.com/hashicorp/golang-lru/v2 v2.0.6/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= +github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k= +github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= github.com/hashicorp/hc-install v0.6.3 h1:yE/r1yJvWbtrJ0STwScgEnCanb0U9v7zp0Gbkmcoxqs= github.com/hashicorp/hc-install v0.6.3/go.mod h1:KamGdbodYzlufbWh4r9NRo8y6GLHWZP2GBtdnms1Ln0= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= @@ -1181,7 +1182,6 @@ github.com/klauspost/compress v1.11.3/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYs github.com/klauspost/compress v1.11.13/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress v1.12.3/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= github.com/klauspost/compress v1.13.0/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= -github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= github.com/klauspost/compress v1.15.11/go.mod h1:QPwzmACJjUTFsnSHH934V6woptycfrDDJnH7hvFVbGM= github.com/klauspost/compress v1.17.2 h1:RlWWUY/Dr4fL8qk9YG7DTZ7PDgME2V4csBXA8L/ixi4= github.com/klauspost/compress v1.17.2/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= @@ -1230,6 +1230,8 @@ github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de 
h1:9TO3cAIGXtEhnIaL+V+BEER86oLrvS+kWobKpbJuye0= github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de/go.mod h1:zAbeS9B/r2mtpb6U+EI2rYA5OAXxsYw6wTamcNW+zcE= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= github.com/lunixbochs/struc v0.0.0-20200707160740-784aaebc1d40 h1:EnfXoSqDfSNJv0VBNqY/88RNnhSGYkrHaO0mmFGbVsc= github.com/lunixbochs/struc v0.0.0-20200707160740-784aaebc1d40/go.mod h1:vy1vK6wD6j7xX6O6hXe621WabdtNkou2h7uRtTfRMyg= github.com/magefile/mage v1.15.0 h1:BvGheCMAsG3bWUDbZ8AyXXpCNwU9u5CB6sM+HNb9HYg= @@ -1279,7 +1281,6 @@ github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWE github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= -github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= github.com/mattn/go-runewidth v0.0.14 h1:+xnbZSEeDbOIg5/mE6JF0w6n9duR1l3/WmbinWVwUuU= github.com/mattn/go-runewidth v0.0.14/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= github.com/mattn/go-shellwords v1.0.3/go.mod h1:3xCvwCdWdlDJUrvuMn7Wuy9eWs4pE8vqg+NOMyg4B2o= @@ -1353,7 +1354,6 @@ github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9G github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/monochromegane/go-gitignore v0.0.0-20200626010858-205db1a8cc00 h1:n6/2gBQ3RWajuToeY6ZtZTIKv2v7ThUy5KKusIT0yc0= github.com/monochromegane/go-gitignore v0.0.0-20200626010858-205db1a8cc00/go.mod h1:Pm3mSP3c5uWn86xMLZ5Sa7JB9GsEZySvHYXCTK4E9q4= -github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= github.com/mrunalp/fileutils v0.5.0/go.mod h1:M1WthSahJixYnrXQl/DFQuteStB1weuxD2QJNHXfbSQ= @@ -1371,8 +1371,6 @@ github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+ github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4= github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= -github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec= -github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY= github.com/onsi/ginkgo v0.0.0-20151202141238-7f8ab55aaf3b/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v0.0.0-20170829012221-11459a886d9c/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= @@ -1443,8 +1441,8 @@ github.com/phayes/freeport v0.0.0-20220201140144-74d24b5ae9f5 h1:Ii+DKncOVM8Cu1H github.com/phayes/freeport v0.0.0-20220201140144-74d24b5ae9f5/go.mod h1:iIss55rKnNBTvrwdmkUpLnDpZoAHvWaiq5+iMmen4AE= github.com/pjbgf/sha1cd v0.3.0 h1:4D5XXmUUBUl/xQ6IjCkEAbqXskkq/4O7LmGn0AqMDs4= github.com/pjbgf/sha1cd v0.3.0/go.mod h1:nZ1rrWOcGJ5uZgEEVL1VUM9iRQiZvWdbZjkKyFzPPsI= 
-github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 h1:KoWmjvw+nsYOo29YJK9vDA65RGE3NrOnUtO7a+RF9HU= -github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1-0.20171018195549-f15c970de5b7/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -1455,6 +1453,8 @@ github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZN github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= github.com/poy/onpar v1.1.2 h1:QaNrNiZx0+Nar5dLgTVp5mXkyoVFIbepjyEoGSnhbAY= github.com/poy/onpar v1.1.2/go.mod h1:6X8FLNoxyr9kkmnlqpK6LSoiOtrO6MICtWwEuWkLjzg= github.com/pquerna/cachecontrol v0.0.0-20171018203845-0dec1b30a021/go.mod h1:prYjPmNq4d1NPVmpShWobRqXY3q7Vp+80DqgxxUrUIA= @@ -1535,6 +1535,12 @@ github.com/sergi/go-diff v1.3.1 h1:xkr+Oxo4BOQKmkn/B9eMK0g5Kg/983T9DqqPHwYqD+8= github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I= github.com/shibumi/go-pathspec v1.3.0 h1:QUyMZhFo0Md5B8zV8x2tesohbb5kfbpTi9rBnKh5dkI= github.com/shibumi/go-pathspec v1.3.0/go.mod h1:Xutfslp817l2I1cZvgcfeMQJG5QnU2lh5tVaaMCl3jE= +github.com/shirou/gopsutil/v3 v3.23.12 h1:z90NtUkp3bMtmICZKpC4+WaknU1eXtp5vtbQ11DgpE4= +github.com/shirou/gopsutil/v3 v3.23.12/go.mod h1:1FrWgea594Jp7qmjHUUPlJDTPgcsb9mGnXDxavtikzM= +github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM= +github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ= +github.com/shoenig/test v0.6.4 h1:kVTaSd7WLz5WZ2IaoM0RSzRsUD+m8wRR+5qvntpn4LU= +github.com/shoenig/test v0.6.4/go.mod h1:byHiCGXqrVaflBLAMq/srcZIHynQPQgeyvkvXnjqq0k= github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8= github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= @@ -1599,8 +1605,9 @@ github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+ github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= -github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= 
github.com/stretchr/testify v0.0.0-20180303142811-b89eecf5ca5d/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= @@ -1612,8 +1619,9 @@ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8= github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= @@ -1625,12 +1633,16 @@ github.com/tchap/go-patricia/v2 v2.3.1 h1:6rQp39lgIYZ+MHmdEq4xzuk1t7OdC35z/xm0BG github.com/tchap/go-patricia/v2 v2.3.1/go.mod h1:VZRHKAb53DLaG+nA9EaYYiaEx6YztwDlLElMsnSHD4k= github.com/terminalstatic/go-xsd-validate v0.1.5 h1:RqpJnf6HGE2CB/lZB1A8BYguk8uRtcvYAPLCF15qguo= github.com/terminalstatic/go-xsd-validate v0.1.5/go.mod h1:18lsvYFofBflqCrvo1umpABZ99+GneNTw2kEEc8UPJw= -github.com/testcontainers/testcontainers-go v0.23.0 h1:ERYTSikX01QczBLPZpqsETTBO7lInqEP349phDOVJVs= -github.com/testcontainers/testcontainers-go v0.23.0/go.mod h1:3gzuZfb7T9qfcH2pHpV4RLlWrPjeWNQah6XlYQ32c4I= -github.com/testcontainers/testcontainers-go/modules/localstack v0.26.0 h1:lpL04dHA9mGFBQLFcV+aEEh1Tf4ohXdIGgoj3J0bacM= -github.com/testcontainers/testcontainers-go/modules/localstack v0.26.0/go.mod h1:1xkZPpkBu6coI7CyVn3DXUBnsVrZ+fd/Cc8lx6zk2mk= -github.com/tetratelabs/wazero v1.6.0 h1:z0H1iikCdP8t+q341xqepY4EWvHEw8Es7tlqiVzlP3g= -github.com/tetratelabs/wazero v1.6.0/go.mod h1:0U0G41+ochRKoPKCJlh0jMg1CHkyfK8kDqiirMmKY8A= +github.com/testcontainers/testcontainers-go v0.28.0 h1:1HLm9qm+J5VikzFDYhOd+Zw12NtOl+8drH2E8nTY1r8= +github.com/testcontainers/testcontainers-go v0.28.0/go.mod h1:COlDpUXbwW3owtpMkEB1zo9gwb1CoKVKlyrVPejF4AU= +github.com/testcontainers/testcontainers-go/modules/localstack v0.28.0 h1:NOtK4tz2J1KbdAV6Lk9AQPUXB6Op8jGzKNfwVCThRxU= +github.com/testcontainers/testcontainers-go/modules/localstack v0.28.0/go.mod h1:nLimAfgHTQfaDZ2cO8/B4Z1qr8e020sM3ybpSsOVAUY= +github.com/tetratelabs/wazero v1.7.0 h1:jg5qPydno59wqjpGrHph81lbtHzTrWzwwtD4cD88+hQ= +github.com/tetratelabs/wazero v1.7.0/go.mod h1:ytl6Zuh20R/eROuyDaGPkp82O9C/DJfXAwJfQ3X6/7Y= +github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU= +github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI= +github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk= +github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY= github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod 
h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= github.com/twitchtv/twirp v8.1.2+incompatible h1:0O6TfzZW09ZP5r+ORA90XQEE3PTgA6C7MBbl2KxvVgE= @@ -1659,9 +1671,6 @@ github.com/willf/bitset v1.1.11-0.20200630133818-d5bec3311243/go.mod h1:RjeCKbqT github.com/willf/bitset v1.1.11/go.mod h1:83CECat5yLh5zVOf4P1ErAgKA5UDvKtgyUABdr3+MjI= github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw= -github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= -github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3kKLN4= -github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM= github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo= github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= @@ -1676,7 +1685,6 @@ github.com/xlab/treeprint v1.2.0/go.mod h1:gj5Gd3gPdKtR1ikdDK6fnFLdmIS0X30kTTuNd github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= github.com/yashtewari/glob-intersection v0.2.0 h1:8iuHdN88yYuCzCdjt0gDe+6bAhUwBeEWqThExu54RFg= github.com/yashtewari/glob-intersection v0.2.0/go.mod h1:LK7pIC3piUjovexikBbJ26Yml7g8xa5bsjfx2v1fwok= -github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= @@ -1685,6 +1693,8 @@ github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1 github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= github.com/yuin/gopher-lua v1.1.0 h1:BojcDhfyDWgU2f2TOzYK/g5p2gxMrku8oupLDqlnSqE= github.com/yuin/gopher-lua v1.1.0/go.mod h1:GBR0iDaNXjAgGg9zfCvksxSRnQx76gclCIb7kdAd1Pw= +github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw= +github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= github.com/yvasiyarov/go-metrics v0.0.0-20140926110328-57bccd1ccd43 h1:+lm10QQTNSBd8DVTNGHx7o/IKu9HYDvLMffDhbyLccI= github.com/yvasiyarov/go-metrics v0.0.0-20140926110328-57bccd1ccd43/go.mod h1:aX5oPXxHm3bOH+xeAttToC8pqch2ScQN/JoXYupl6xs= github.com/yvasiyarov/gorelic v0.0.0-20141212073537-a9bba5b9ab50 h1:hlE8//ciYMztlGpl/VA+Zm1AcTPHYkHJPbHqE6WJUXE= @@ -1699,14 +1709,14 @@ github.com/zclconf/go-cty-yaml v1.0.3/go.mod h1:9YLUH4g7lOhVWqUbctnVlZ5KLpg7JApr go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= go.etcd.io/bbolt v1.3.5/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ= -go.etcd.io/bbolt v1.3.8 h1:xs88BrvEv273UsB79e0hcVrlUWmS0a8upikMFhSyAtA= -go.etcd.io/bbolt v1.3.8/go.mod h1:N9Mkw9X8x5fupy0IKsmuqVtoGDyxsaDlbk4Rd05IAQw= +go.etcd.io/bbolt v1.3.9 h1:8x7aARPEXiXbHmtUwAIv7eV2fQFHrLLavdiJ3uzJXoI= +go.etcd.io/bbolt v1.3.9/go.mod h1:zaO32+Ti0PK1ivdPtgMESzuzL2VPoIG1PCQNvOdo/dE= go.etcd.io/etcd v0.5.0-alpha.5.0.20200910180754-dd1b699fc489/go.mod 
h1:yVHk9ub3CSBatqGNg7GRmsnfLWtoW60w4eDYfh7vHDg= go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ= -go.mongodb.org/mongo-driver v1.13.1 h1:YIc7HTYsKndGK4RFzJ3covLz1byri52x0IoMB0Pt/vk= -go.mongodb.org/mongo-driver v1.13.1/go.mod h1:wcDf1JBCXy2mOW0bWHwO/IOYqdca1MPCwDtFu/Z9+eo= +go.mongodb.org/mongo-driver v1.14.0 h1:P98w8egYRjYe3XDjxhYJagTokP/H6HzlsnojRgZRd80= +go.mongodb.org/mongo-driver v1.14.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c= go.mozilla.org/pkcs7 v0.0.0-20200128120323-432b2356ecb1/go.mod h1:SNgMg+EgDFwmvSmLRTNKC5fegJjB7v23qTQ0XLGUNHk= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= @@ -1774,6 +1784,8 @@ golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0 golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58= +golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= +golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= golang.org/x/crypto v0.19.0 h1:ENy+Az/9Y1vSrlrvBSyna3PITt4tiZLf7sgCjZBX7Wo= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= @@ -1814,6 +1826,9 @@ golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.14.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/mod v0.15.0 h1:SernR4v+D55NyBH2QiEQrlBAnj1ECL6AGrA5+dPaMY8= golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -1882,6 +1897,9 @@ golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= +golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4= golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= @@ -1926,6 +1944,8 @@ golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod 
h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220929204114-8fcdb60fdcc0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -2000,6 +2020,7 @@ golang.org/x/sys v0.0.0-20201117170446-d9b008d0a637/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201202213521-69691e467435/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -2015,7 +2036,6 @@ golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -2046,6 +2066,11 @@ golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= @@ -2055,6 +2080,9 @@ golang.org/x/term v0.1.0/go.mod 
h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= +golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= +golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY= golang.org/x/term v0.17.0 h1:mkTF7LCd6WGJNL3K1Ad7kwxNfYAW6a8a8QqtMblp/4U= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -2070,6 +2098,8 @@ golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -2088,7 +2118,6 @@ golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3 golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190422233926-fe54fb35175b/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= @@ -2140,8 +2169,10 @@ golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= -golang.org/x/tools v0.16.1 h1:TLyB3WofjdOEepBHAU20JdNC1Zbg87elYofWYAY5oZA= -golang.org/x/tools v0.16.1/go.mod h1:kYVVN6I1mBNoB1OX+noeBjbRk4IUEPa7JJ+TJMEooJ0= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= +golang.org/x/tools v0.17.0 h1:FvmRgNOcs3kOa+T20R1uhfP9F6HgG2mfxDv1vrx1Htc= +golang.org/x/tools v0.17.0/go.mod h1:xsh6VxdV005rRVaS6SSAf9oiAqljS7UZUacMZ8Bnsps= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod 
h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -2380,8 +2411,8 @@ google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQ google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I= -google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= +google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/integration/client_server_test.go b/integration/client_server_test.go index 2b21479e0f16..f217021658ae 100644 --- a/integration/client_server_test.go +++ b/integration/client_server_test.go @@ -242,6 +242,16 @@ func TestClientServer(t *testing.T) { }, golden: "testdata/pom.json.golden", }, + { + name: "scan package-lock.json with repo command in client/server mode", + args: csArgs{ + Command: "repo", + RemoteAddrOption: "--server", + Target: "testdata/fixtures/repo/npm/", + ListAllPackages: true, + }, + golden: "testdata/npm.json.golden", + }, { name: "scan sample.pem with repo command in client/server mode", args: csArgs{ @@ -588,6 +598,10 @@ func setupClient(t *testing.T, c csArgs, addr string, cacheDir string, golden st osArgs = append(osArgs, "--format", "json") } + if c.ListAllPackages { + osArgs = append(osArgs, "--list-all-pkgs") + } + if c.IgnoreUnfixed { osArgs = append(osArgs, "--ignore-unfixed") } diff --git a/integration/sbom_test.go b/integration/sbom_test.go index dc18cb43bceb..65c99f9e9600 100644 --- a/integration/sbom_test.go +++ b/integration/sbom_test.go @@ -6,11 +6,11 @@ import ( "path/filepath" "testing" + ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" v1 "github.com/google/go-containerregistry/pkg/v1" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/types" ) @@ -19,6 +19,7 @@ func TestSBOM(t *testing.T) { input string format string artifactType string + scanners string } tests := []struct { name string @@ -150,6 +151,16 @@ func TestSBOM(t *testing.T) { }, }, }, + { + name: "license check cyclonedx json", + args: args{ + input: "testdata/fixtures/sbom/license-cyclonedx.json", + format: "json", + artifactType: "cyclonedx", + scanners: "license", + }, + golden: "testdata/license-cyclonedx.json.golden", + }, } // Set up testing DB @@ -157,6 +168,11 @@ func TestSBOM(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { + scanners := "vuln" + if tt.args.scanners != "" { + scanners = tt.args.scanners + } + osArgs := []string{ "--cache-dir", cacheDir, @@ -165,6 +181,8 @@ func TestSBOM(t *testing.T) { "--skip-db-update", "--format", tt.args.format, + "--scanners", + scanners, } // Set up the output file @@ -223,5 +241,10 @@ func compareSBOMReports(t *testing.T, wantFile, gotFile string, overrideWant typ } got := readReport(t, gotFile) + // when running on Windows FS + 
got.ArtifactName = filepath.ToSlash(filepath.Clean(got.ArtifactName)) + for i, result := range got.Results { + got.Results[i].Target = filepath.ToSlash(filepath.Clean(result.Target)) + } assert.Equal(t, want, got) } diff --git a/integration/testdata/conda-spdx.json.golden b/integration/testdata/conda-spdx.json.golden index be1146b285c4..db81eb8abd13 100644 --- a/integration/testdata/conda-spdx.json.golden +++ b/integration/testdata/conda-spdx.json.golden @@ -3,7 +3,7 @@ "dataLicense": "CC0-1.0", "SPDXID": "SPDXRef-DOCUMENT", "name": "testdata/fixtures/repo/conda", - "documentNamespace": "http://aquasecurity.github.io/trivy/filesystem/testdata/fixtures/repo/conda-3ff14136-e09f-4df9-80ea-000000000001", + "documentNamespace": "http://aquasecurity.github.io/trivy/filesystem/testdata/fixtures/repo/conda-3ff14136-e09f-4df9-80ea-000000000004", "creationInfo": { "creators": [ "Organization: aquasecurity", @@ -12,17 +12,9 @@ "created": "2021-08-25T12:20:30Z" }, "packages": [ - { - "name": "conda-pkg", - "SPDXID": "SPDXRef-Application-ee5ef1aa4ac89125", - "downloadLocation": "NONE", - "filesAnalyzed": false, - "sourceInfo": "Conda", - "primaryPackagePurpose": "APPLICATION" - }, { "name": "openssl", - "SPDXID": "SPDXRef-Package-20b95c21bfbf9fc4", + "SPDXID": "SPDXRef-Package-b8061a5279413d55", "versionInfo": "1.1.1q", "supplier": "NOASSERTION", "downloadLocation": "NONE", @@ -39,11 +31,14 @@ "referenceLocator": "pkg:conda/openssl@1.1.1q" } ], + "attributionTexts": [ + "PkgType: conda-pkg" + ], "primaryPackagePurpose": "LIBRARY" }, { "name": "pip", - "SPDXID": "SPDXRef-Package-11a429ec3bd01d80", + "SPDXID": "SPDXRef-Package-84198b3828050c11", "versionInfo": "22.2.2", "supplier": "NOASSERTION", "downloadLocation": "NONE", @@ -60,6 +55,9 @@ "referenceLocator": "pkg:conda/pip@22.2.2" } ], + "attributionTexts": [ + "PkgType: conda-pkg" + ], "primaryPackagePurpose": "LIBRARY" }, { @@ -105,27 +103,22 @@ }, { "spdxElementId": "SPDXRef-Filesystem-2e2426fd0f2580ef", - "relatedSpdxElement": "SPDXRef-Application-ee5ef1aa4ac89125", + "relatedSpdxElement": "SPDXRef-Package-84198b3828050c11", "relationshipType": "CONTAINS" }, { - "spdxElementId": "SPDXRef-Application-ee5ef1aa4ac89125", - "relatedSpdxElement": "SPDXRef-Package-20b95c21bfbf9fc4", - "relationshipType": "CONTAINS" - }, - { - "spdxElementId": "SPDXRef-Package-20b95c21bfbf9fc4", - "relatedSpdxElement": "SPDXRef-File-600e5e0110a84891", + "spdxElementId": "SPDXRef-Filesystem-2e2426fd0f2580ef", + "relatedSpdxElement": "SPDXRef-Package-b8061a5279413d55", "relationshipType": "CONTAINS" }, { - "spdxElementId": "SPDXRef-Application-ee5ef1aa4ac89125", - "relatedSpdxElement": "SPDXRef-Package-11a429ec3bd01d80", + "spdxElementId": "SPDXRef-Package-84198b3828050c11", + "relatedSpdxElement": "SPDXRef-File-7eb62e2a3edddc0a", "relationshipType": "CONTAINS" }, { - "spdxElementId": "SPDXRef-Package-11a429ec3bd01d80", - "relatedSpdxElement": "SPDXRef-File-7eb62e2a3edddc0a", + "spdxElementId": "SPDXRef-Package-b8061a5279413d55", + "relatedSpdxElement": "SPDXRef-File-600e5e0110a84891", "relationshipType": "CONTAINS" } ] diff --git a/integration/testdata/fixtures/sbom/license-cyclonedx.json b/integration/testdata/fixtures/sbom/license-cyclonedx.json new file mode 100644 index 000000000000..e8353ca609cc --- /dev/null +++ b/integration/testdata/fixtures/sbom/license-cyclonedx.json @@ -0,0 +1,125 @@ +{ + "$schema": "http://cyclonedx.org/schema/bom-1.5.schema.json", + "bomFormat": "CycloneDX", + "specVersion": "1.5", + "serialNumber": 
"urn:uuid:c09512e3-47e7-4eff-8f76-5d7ae72b26a5", + "version": 1, + "metadata": { + "timestamp": "2024-03-10T14:57:31+00:00", + "tools": { + "components": [ + { + "type": "application", + "group": "aquasecurity", + "name": "trivy", + "version": "dev" + } + ] + }, + "component": { + "bom-ref": "acc9d4aa-4158-4969-a497-637e114fde0c", + "type": "application", + "name": "C:/Users/bedla.czech/IdeaProjects/sbom-demo", + "properties": [ + { + "name": "aquasecurity:trivy:SchemaVersion", + "value": "2" + } + ] + } + }, + "components": [ + { + "bom-ref": "eb56cd49-da98-4b08-bfc8-9880fb063cf1", + "type": "application", + "name": "pom.xml", + "properties": [ + { + "name": "aquasecurity:trivy:Class", + "value": "lang-pkgs" + }, + { + "name": "aquasecurity:trivy:Type", + "value": "pom" + } + ] + }, + { + "bom-ref": "pkg:maven/org.eclipse.sisu/org.eclipse.sisu.plexus@0.3.0.M1", + "type": "library", + "group": "org.eclipse.sisu", + "name": "org.eclipse.sisu.plexus", + "version": "0.3.0.M1", + "licenses": [ + { + "license": { + "name": "EPL-1.0" + } + } + ], + "purl": "pkg:maven/org.eclipse.sisu/org.eclipse.sisu.plexus@0.3.0.M1", + "properties": [ + { + "name": "aquasecurity:trivy:PkgID", + "value": "org.eclipse.sisu:org.eclipse.sisu.plexus:0.3.0.M1" + }, + { + "name": "aquasecurity:trivy:PkgType", + "value": "pom" + } + ] + }, + { + "bom-ref": "pkg:maven/org.ow2.asm/asm@9.5", + "type": "library", + "group": "org.ow2.asm", + "name": "asm", + "version": "9.5", + "licenses": [ + { + "license": { + "name": "BSD-3-Clause" + } + } + ], + "purl": "pkg:maven/org.ow2.asm/asm@9.5", + "properties": [ + { + "name": "aquasecurity:trivy:PkgID", + "value": "org.ow2.asm:asm:9.5" + }, + { + "name": "aquasecurity:trivy:PkgType", + "value": "pom" + } + ] + }, + { + "bom-ref": "pkg:maven/org.slf4j/slf4j-api@2.0.11", + "type": "library", + "group": "org.slf4j", + "name": "slf4j-api", + "version": "2.0.11", + "licenses": [ + { + "license": { + "name": "MIT License" + } + } + ], + "purl": "pkg:maven/org.slf4j/slf4j-api@2.0.11", + "properties": [ + { + "name": "aquasecurity:trivy:PkgID", + "value": "org.slf4j:slf4j-api:2.0.11" + }, + { + "name": "aquasecurity:trivy:PkgType", + "value": "pom" + } + ] + } + ], + "dependencies": [], + "vulnerabilities": [] +} diff --git a/integration/testdata/fluentd-multiple-lockfiles.cdx.json.golden b/integration/testdata/fluentd-multiple-lockfiles.cdx.json.golden index 40fdceb532c5..934bda200639 100644 --- a/integration/testdata/fluentd-multiple-lockfiles.cdx.json.golden +++ b/integration/testdata/fluentd-multiple-lockfiles.cdx.json.golden @@ -286,7 +286,7 @@ "bom-ref": "pkg:deb/debian/bsdutils@2.33.1-0.1?arch=amd64&distro=debian-10.2&epoch=1", "type": "library", "name": "bsdutils", - "version": "2.33.1-0.1", + "version": "1:2.33.1-0.1", "licenses": [ { "license": { @@ -628,7 +628,7 @@ "bom-ref": "pkg:deb/debian/diffutils@3.7-3?arch=amd64&distro=debian-10.2&epoch=1", "type": "library", "name": "diffutils", - "version": "3.7-3", + "version": "1:3.7-3", "licenses": [ { "license": { @@ -1338,7 +1338,7 @@ "bom-ref": "pkg:deb/debian/libattr1@2.4.48-4?arch=amd64&distro=debian-10.2&epoch=1", "type": "library", "name": "libattr1", - "version": "2.4.48-4", + "version": "1:2.4.48-4", "licenses": [ { "license": { @@ -1396,7 +1396,7 @@ "bom-ref": "pkg:deb/debian/libaudit-common@2.8.4-3?arch=all&distro=debian-10.2&epoch=1", "type": "library", "name": "libaudit-common", - "version": "2.8.4-3", + "version": "1:2.8.4-3", "licenses": [ { "license": { @@ -1454,7 +1454,7 @@ "bom-ref": 
"pkg:deb/debian/libaudit1@2.8.4-3?arch=amd64&distro=debian-10.2&epoch=1", "type": "library", "name": "libaudit1", - "version": "2.8.4-3", + "version": "1:2.8.4-3", "licenses": [ { "license": { @@ -2091,7 +2091,7 @@ "bom-ref": "pkg:deb/debian/libgcc1@8.3.0-6?arch=amd64&distro=debian-10.2&epoch=1", "type": "library", "name": "libgcc1", - "version": "8.3.0-6", + "version": "1:8.3.0-6", "purl": "pkg:deb/debian/libgcc1@8.3.0-6?arch=amd64&distro=debian-10.2&epoch=1", "properties": [ { @@ -2285,7 +2285,7 @@ "bom-ref": "pkg:deb/debian/libgmp10@6.1.2%2Bdfsg-4?arch=amd64&distro=debian-10.2&epoch=2", "type": "library", "name": "libgmp10", - "version": "6.1.2+dfsg-4", + "version": "2:6.1.2+dfsg-4", "licenses": [ { "license": { @@ -3286,7 +3286,7 @@ "bom-ref": "pkg:deb/debian/libpcre3@8.39-12?arch=amd64&distro=debian-10.2&epoch=2", "type": "library", "name": "libpcre3", - "version": "8.39-12", + "version": "2:8.39-12", "purl": "pkg:deb/debian/libpcre3@8.39-12?arch=amd64&distro=debian-10.2&epoch=2", "properties": [ { @@ -4450,7 +4450,7 @@ "bom-ref": "pkg:deb/debian/login@4.5-1.1?arch=amd64&distro=debian-10.2&epoch=1", "type": "library", "name": "login", - "version": "4.5-1.1", + "version": "1:4.5-1.1", "licenses": [ { "license": { @@ -4742,7 +4742,7 @@ "bom-ref": "pkg:deb/debian/passwd@4.5-1.1?arch=amd64&distro=debian-10.2&epoch=1", "type": "library", "name": "passwd", - "version": "4.5-1.1", + "version": "1:4.5-1.1", "licenses": [ { "license": { @@ -5338,7 +5338,7 @@ "bom-ref": "pkg:deb/debian/ruby@2.5.1?arch=amd64&distro=debian-10.2&epoch=1", "type": "library", "name": "ruby", - "version": "2.5.1", + "version": "1:2.5.1", "licenses": [ { "license": { @@ -5690,7 +5690,7 @@ "bom-ref": "pkg:deb/debian/zlib1g@1.2.11.dfsg-1?arch=amd64&distro=debian-10.2&epoch=1", "type": "library", "name": "zlib1g", - "version": "1.2.11.dfsg-1", + "version": "1:1.2.11.dfsg-1", "licenses": [ { "license": { diff --git a/integration/testdata/license-cyclonedx.json.golden b/integration/testdata/license-cyclonedx.json.golden new file mode 100644 index 000000000000..cf69da9756ed --- /dev/null +++ b/integration/testdata/license-cyclonedx.json.golden @@ -0,0 +1,65 @@ +{ + "SchemaVersion": 2, + "CreatedAt": "2021-08-25T12:20:30.000000005Z", + "ArtifactName": "testdata/fixtures/sbom/license-cyclonedx.json", + "ArtifactType": "cyclonedx", + "Metadata": { + "ImageConfig": { + "architecture": "", + "created": "0001-01-01T00:00:00Z", + "os": "", + "rootfs": { + "type": "", + "diff_ids": null + }, + "config": {} + } + }, + "Results": [ + { + "Target": "OS Packages", + "Class": "license" + }, + { + "Target": "pom.xml", + "Class": "license" + }, + { + "Target": "Java", + "Class": "license", + "Licenses": [ + { + "Severity": "MEDIUM", + "Category": "reciprocal", + "PkgName": "org.eclipse.sisu:org.eclipse.sisu.plexus", + "FilePath": "", + "Name": "EPL-1.0", + "Confidence": 1, + "Link": "" + }, + { + "Severity": "LOW", + "Category": "notice", + "PkgName": "org.ow2.asm:asm", + "FilePath": "", + "Name": "BSD-3-Clause", + "Confidence": 1, + "Link": "" + }, + { + "Severity": "UNKNOWN", + "Category": "unknown", + "PkgName": "org.slf4j:slf4j-api", + "FilePath": "", + "Name": "MIT License", + "Confidence": 1, + "Link": "" + } + ] + }, + { + "Target": "Loose File License(s)", + "Class": "license-file" + } + ] +} diff --git a/misc/eol/data/ubuntu.csv b/misc/eol/data/ubuntu.csv index 98a9646d6142..bd9e81a95bbc 100644 --- a/misc/eol/data/ubuntu.csv +++ b/misc/eol/data/ubuntu.csv @@ -30,3 +30,4 @@ 19.04,Disco Dingo,disco,2018-10-18,2019-04-18,2020-01-18 
19.10,Eoan Ermine,eoan,2019-04-18,2019-10-17,2020-07-17 20.04 LTS,Focal Fossa,focal,2020-04-23,2025-04-23,2030-04-23 +22.04 LTS,Jammy Jellyfish,jammy,2022-04-21,2027-04-21,2032-04-21 diff --git a/pkg/cloud/aws/commands/run.go b/pkg/cloud/aws/commands/run.go index a4541e9f0544..23406aeafda5 100644 --- a/pkg/cloud/aws/commands/run.go +++ b/pkg/cloud/aws/commands/run.go @@ -132,10 +132,6 @@ func Run(ctx context.Context, opt flag.Options) error { ctx, cancel := context.WithTimeout(ctx, opt.GlobalOptions.Timeout) defer cancel() - if err := log.InitLogger(opt.Debug, false); err != nil { - return xerrors.Errorf("logger error: %w", err) - } - var err error defer func() { if errors.Is(err, context.DeadlineExceeded) { diff --git a/pkg/cloud/aws/commands/run_test.go b/pkg/cloud/aws/commands/run_test.go index feacdcc5a762..fe25bf20098d 100644 --- a/pkg/cloud/aws/commands/run_test.go +++ b/pkg/cloud/aws/commands/run_test.go @@ -267,6 +267,63 @@ const expectedS3ScanResult = `{ } ` +const expectedS3ScanResultWithExceptions = `{ + "CreatedAt": "2021-08-25T12:20:30.000000005Z", + "ArtifactName": "12345678", + "ArtifactType": "aws_account", + "Metadata": { + "ImageConfig": { + "architecture": "", + "created": "0001-01-01T00:00:00Z", + "os": "", + "rootfs": { + "type": "", + "diff_ids": null + }, + "config": {} + } + }, + "Results": [ + { + "Target": "arn:aws:s3:::examplebucket", + "Class": "config", + "Type": "cloud", + "MisconfSummary": { + "Successes": 0, + "Failures": 1, + "Exceptions": 8 + }, + "Misconfigurations": [ + { + "Type": "AWS", + "ID": "AVD-AWS-0094", + "AVDID": "AVD-AWS-0094", + "Title": "S3 buckets should each define an aws_s3_bucket_public_access_block", + "Description": "The \"block public access\" settings in S3 override individual policies that apply to a given bucket, meaning that all public access can be controlled in one central types for that bucket. 
It is therefore good practice to define these settings for each bucket in order to clearly define the public access that can be allowed for it.", + "Message": "Bucket does not have a corresponding public access block.", + "Resolution": "Define a aws_s3_bucket_public_access_block for the given bucket to control public access policies", + "Severity": "LOW", + "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0094", + "References": [ + "https://avd.aquasec.com/misconfig/avd-aws-0094" + ], + "Status": "FAIL", + "Layer": {}, + "CauseMetadata": { + "Resource": "arn:aws:s3:::examplebucket", + "Provider": "aws", + "Service": "s3", + "Code": { + "Lines": null + } + } + } + ] + } + ] +} +` + const expectedCustomScanResult = `{ "CreatedAt": "2021-08-25T12:20:30.000000005Z", "ArtifactName": "12345678", @@ -915,6 +972,7 @@ func Test_Run(t *testing.T) { regoPolicy string allServices []string inputData string + ignoreFile string }{ { name: "succeed with cached infra", @@ -1140,6 +1198,25 @@ Summary Report for compliance: my-custom-spec cacheContent: "testdata/s3andcloudtrailcache.json", expectErr: true, }, + { + name: "ignore findings with .trivyignore", + options: flag.Options{ + RegoOptions: flag.RegoOptions{SkipPolicyUpdate: true}, + AWSOptions: flag.AWSOptions{ + Region: "us-east-1", + Services: []string{"s3"}, + Account: "12345678", + }, + CloudOptions: flag.CloudOptions{ + MaxCacheAge: time.Hour * 24 * 365 * 100, + }, + MisconfOptions: flag.MisconfOptions{IncludeNonFailures: true}, + }, + cacheContent: "testdata/s3onlycache.json", + allServices: []string{"s3"}, + ignoreFile: "testdata/.trivyignore", + want: expectedS3ScanResultWithExceptions, + }, } ctx := clock.With(context.Background(), time.Date(2021, 8, 25, 12, 20, 30, 5, time.UTC)) @@ -1192,6 +1269,10 @@ Summary Report for compliance: my-custom-spec require.NoError(t, os.WriteFile(cacheFile, cacheData, 0600)) } + if test.ignoreFile != "" { + test.options.ReportOptions.IgnoreFile = test.ignoreFile + } + err := Run(ctx, test.options) if test.expectErr { assert.Error(t, err) diff --git a/pkg/cloud/aws/commands/testdata/.trivyignore b/pkg/cloud/aws/commands/testdata/.trivyignore new file mode 100644 index 000000000000..44ef395ee173 --- /dev/null +++ b/pkg/cloud/aws/commands/testdata/.trivyignore @@ -0,0 +1,8 @@ +AVD-AWS-0086 +AVD-AWS-0087 +AVD-AWS-0088 +AVD-AWS-0090 +AVD-AWS-0132 +AVD-AWS-0091 +AVD-AWS-0092 +AVD-AWS-0093 \ No newline at end of file diff --git a/pkg/cloud/aws/scanner/progress.go b/pkg/cloud/aws/scanner/progress.go index 243e6eb47f39..a313dd482c6c 100644 --- a/pkg/cloud/aws/scanner/progress.go +++ b/pkg/cloud/aws/scanner/progress.go @@ -2,6 +2,7 @@ package scanner import ( "fmt" + "io" "os" "github.com/aquasecurity/loading/pkg/bar" @@ -12,15 +13,17 @@ type progressTracker struct { serviceTotal int serviceCurrent int isTTY bool + debugWriter io.Writer } -func newProgressTracker() *progressTracker { +func newProgressTracker(w io.Writer) *progressTracker { var isTTY bool if stat, err := os.Stdout.Stat(); err == nil { isTTY = stat.Mode()&os.ModeCharDevice == os.ModeCharDevice } return &progressTracker{ - isTTY: isTTY, + isTTY: isTTY, + debugWriter: w, } } @@ -69,7 +72,8 @@ func (m *progressTracker) StartService(name string) { if !m.isTTY { return } - fmt.Printf("[%d/%d] Scanning %s...\n", m.serviceCurrent+1, m.serviceTotal, name) + + fmt.Fprintf(m.debugWriter, "[%d/%d] Scanning %s...\n", m.serviceCurrent+1, m.serviceTotal, name) m.serviceBar = bar.New( bar.OptionHideOnFinish(true), bar.OptionWithAutoComplete(false), diff --git 
a/pkg/cloud/aws/scanner/scanner.go b/pkg/cloud/aws/scanner/scanner.go index d91721512505..84b5cf6c640e 100644 --- a/pkg/cloud/aws/scanner/scanner.go +++ b/pkg/cloud/aws/scanner/scanner.go @@ -31,9 +31,11 @@ func (s *AWSScanner) Scan(ctx context.Context, option flag.Options) (scan.Result awsCache := cache.New(option.CacheDir, option.MaxCacheAge, option.Account, option.Region) included, missing := awsCache.ListServices(option.Services) + prefixedLogger := &log.PrefixedLogger{Name: "aws"} + var scannerOpts []options.ScannerOption if !option.NoProgress { - tracker := newProgressTracker() + tracker := newProgressTracker(prefixedLogger) defer tracker.Finish() scannerOpts = append(scannerOpts, aws.ScannerWithProgressTracker(tracker)) } @@ -43,11 +45,11 @@ func (s *AWSScanner) Scan(ctx context.Context, option flag.Options) (scan.Result } if option.Debug { - scannerOpts = append(scannerOpts, options.ScannerWithDebug(&log.PrefixedLogger{Name: "aws"})) + scannerOpts = append(scannerOpts, options.ScannerWithDebug(prefixedLogger)) } if option.Trace { - scannerOpts = append(scannerOpts, options.ScannerWithTrace(&log.PrefixedLogger{Name: "aws"})) + scannerOpts = append(scannerOpts, options.ScannerWithTrace(prefixedLogger)) } if option.Region != "" { @@ -67,7 +69,7 @@ func (s *AWSScanner) Scan(ctx context.Context, option flag.Options) (scan.Result var policyPaths []string var downloadedPolicyPaths []string var err error - downloadedPolicyPaths, err = operation.InitBuiltinPolicies(context.Background(), option.CacheDir, option.Quiet, option.SkipPolicyUpdate, option.MisconfOptions.PolicyBundleRepository) + downloadedPolicyPaths, err = operation.InitBuiltinPolicies(context.Background(), option.CacheDir, option.Quiet, option.SkipPolicyUpdate, option.MisconfOptions.PolicyBundleRepository, option.RegistryOpts()) if err != nil { if !option.SkipPolicyUpdate { log.Logger.Errorf("Falling back to embedded policies: %s", err) diff --git a/pkg/cloud/report/report.go b/pkg/cloud/report/report.go index b2a9d50cf507..2b2f8f3f17ea 100644 --- a/pkg/cloud/report/report.go +++ b/pkg/cloud/report/report.go @@ -70,16 +70,18 @@ func Write(ctx context.Context, rep *Report, opt flag.Options, fromCache bool) e return writeCompliance(ctx, rep, opt, output) } + ignoreConf, err := result.ParseIgnoreFile(ctx, opt.IgnoreFile) + if err != nil { + return xerrors.Errorf("%s error: %w", opt.IgnoreFile, err) + } + var filtered []types.Result // filter results for _, resultsAtTime := range rep.Results { for _, res := range resultsAtTime.Results { resCopy := res - if err := result.FilterResult(ctx, &resCopy, result.IgnoreConfig{}, result.FilterOption{ - Severities: opt.Severities, - IncludeNonFailures: opt.IncludeNonFailures, - }); err != nil { + if err := result.FilterResult(ctx, &resCopy, ignoreConf, opt.FilterOpts()); err != nil { return err } sort.Slice(resCopy.Misconfigurations, func(i, j int) bool { diff --git a/pkg/commands/app.go b/pkg/commands/app.go index 069c9f8b71cd..41d1d2ff645d 100644 --- a/pkg/commands/app.go +++ b/pkg/commands/app.go @@ -1125,11 +1125,24 @@ func NewSBOMCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command { reportFlagGroup.DependencyTree = nil // disable '--dependency-tree' reportFlagGroup.ReportFormat = nil // TODO: support --report summary + scanners := flag.ScannersFlag.Clone() + scanners.Values = xstrings.ToStringSlice(types.Scanners{ + types.VulnerabilityScanner, + types.LicenseScanner, + }) + scanners.Default = xstrings.ToStringSlice(types.Scanners{ + types.VulnerabilityScanner, + }) scanFlagGroup := 
flag.NewScanFlagGroup() - scanFlagGroup.Scanners = nil // disable '--scanners' as it always scans for vulnerabilities + scanFlagGroup.Scanners = scanners // allow only 'vuln' and 'license' options for '--scanners' scanFlagGroup.IncludeDevDeps = nil // disable '--include-dev-deps' scanFlagGroup.Parallel = nil // disable '--parallel' + licenseFlagGroup := flag.NewLicenseFlagGroup() + // License full-scan and confidence-level are for file content only + licenseFlagGroup.LicenseFull = nil + licenseFlagGroup.LicenseConfidenceLevel = nil + sbomFlags := &flag.Flags{ GlobalFlagGroup: globalFlags, CacheFlagGroup: flag.NewCacheFlagGroup(), @@ -1139,11 +1152,12 @@ func NewSBOMCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command { ScanFlagGroup: scanFlagGroup, SBOMFlagGroup: flag.NewSBOMFlagGroup(), VulnerabilityFlagGroup: flag.NewVulnerabilityFlagGroup(), + LicenseFlagGroup: licenseFlagGroup, } cmd := &cobra.Command{ Use: "sbom [flags] SBOM_PATH", - Short: "Scan SBOM for vulnerabilities", + Short: "Scan SBOM for vulnerabilities and licenses", GroupID: groupScanning, Example: ` # Scan CycloneDX and show the result in tables $ trivy sbom /path/to/report.cdx @@ -1166,9 +1180,6 @@ func NewSBOMCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command { return xerrors.Errorf("flag error: %w", err) } - // Scan vulnerabilities - options.Scanners = types.Scanners{types.VulnerabilityScanner} - return artifact.Run(cmd.Context(), options, artifact.TargetSBOM) }, SilenceErrors: true, diff --git a/pkg/commands/artifact/run.go b/pkg/commands/artifact/run.go index da3b86c0e8b9..c54f0fe2fe75 100644 --- a/pkg/commands/artifact/run.go +++ b/pkg/commands/artifact/run.go @@ -584,7 +584,7 @@ func initScannerConfig(opts flag.Options, cacheClient cache.Cache) (ScannerConfi var downloadedPolicyPaths []string var disableEmbedded bool - downloadedPolicyPaths, err := operation.InitBuiltinPolicies(context.Background(), opts.CacheDir, opts.Quiet, opts.SkipPolicyUpdate, opts.MisconfOptions.PolicyBundleRepository) + downloadedPolicyPaths, err := operation.InitBuiltinPolicies(context.Background(), opts.CacheDir, opts.Quiet, opts.SkipPolicyUpdate, opts.MisconfOptions.PolicyBundleRepository, opts.RegistryOpts()) if err != nil { if !opts.SkipPolicyUpdate { log.Logger.Errorf("Falling back to embedded policies: %s", err) diff --git a/pkg/commands/operation/operation.go b/pkg/commands/operation/operation.go index 5ca8301b422d..8f8561a7c290 100644 --- a/pkg/commands/operation/operation.go +++ b/pkg/commands/operation/operation.go @@ -9,6 +9,7 @@ import ( "sync" "github.com/go-redis/redis/v8" + "github.com/google/go-containerregistry/pkg/name" "github.com/google/wire" "github.com/samber/lo" "golang.org/x/xerrors" @@ -110,7 +111,8 @@ func (c Cache) ClearArtifacts() error { } // DownloadDB downloads the DB -func DownloadDB(ctx context.Context, appVersion, cacheDir, dbRepository string, quiet, skipUpdate bool, opt ftypes.RegistryOptions) error { +func DownloadDB(ctx context.Context, appVersion, cacheDir string, dbRepository name.Reference, quiet, skipUpdate bool, + opt ftypes.RegistryOptions) error { mu.Lock() defer mu.Unlock() @@ -148,7 +150,7 @@ func showDBInfo(cacheDir string) error { } // InitBuiltinPolicies downloads the built-in policies and loads them -func InitBuiltinPolicies(ctx context.Context, cacheDir string, quiet, skipUpdate bool, policyBundleRepository string) ([]string, error) { +func InitBuiltinPolicies(ctx context.Context, cacheDir string, quiet, skipUpdate bool, policyBundleRepository string, registryOpts 
ftypes.RegistryOptions) ([]string, error) { mu.Lock() defer mu.Unlock() @@ -159,7 +161,7 @@ func InitBuiltinPolicies(ctx context.Context, cacheDir string, quiet, skipUpdate needsUpdate := false if !skipUpdate { - needsUpdate, err = client.NeedsUpdate(ctx) + needsUpdate, err = client.NeedsUpdate(ctx, registryOpts) if err != nil { return nil, xerrors.Errorf("unable to check if built-in policies need to be updated: %w", err) } @@ -168,7 +170,7 @@ func InitBuiltinPolicies(ctx context.Context, cacheDir string, quiet, skipUpdate if needsUpdate { log.Logger.Info("Need to update the built-in policies") log.Logger.Info("Downloading the built-in policies...") - if err = client.DownloadBuiltinPolicies(ctx); err != nil { + if err = client.DownloadBuiltinPolicies(ctx, registryOpts); err != nil { return nil, xerrors.Errorf("failed to download built-in policies: %w", err) } } diff --git a/pkg/db/db.go b/pkg/db/db.go index fddd3393ea70..9ecb281b064e 100644 --- a/pkg/db/db.go +++ b/pkg/db/db.go @@ -4,9 +4,9 @@ import ( "context" "errors" "fmt" - "strings" "time" + "github.com/google/go-containerregistry/pkg/name" "github.com/google/go-containerregistry/pkg/v1/remote/transport" "golang.org/x/xerrors" "k8s.io/utils/clock" @@ -19,8 +19,13 @@ import ( ) const ( - dbMediaType = "application/vnd.aquasec.trivy.db.layer.v1.tar+gzip" - defaultDBRepository = "ghcr.io/aquasecurity/trivy-db" + SchemaVersion = db.SchemaVersion + dbMediaType = "application/vnd.aquasec.trivy.db.layer.v1.tar+gzip" +) + +var ( + DefaultRepository = fmt.Sprintf("%s:%d", "ghcr.io/aquasecurity/trivy-db", db.SchemaVersion) + defaultRepository, _ = name.NewTag(DefaultRepository) ) // Operation defines the DB operations @@ -32,7 +37,7 @@ type Operation interface { type options struct { artifact *oci.Artifact clock clock.Clock - dbRepository string + dbRepository name.Reference } // Option is a functional option @@ -46,7 +51,7 @@ func WithOCIArtifact(art *oci.Artifact) Option { } // WithDBRepository takes a dbRepository -func WithDBRepository(dbRepository string) Option { +func WithDBRepository(dbRepository name.Reference) Option { return func(opts *options) { opts.dbRepository = dbRepository } @@ -72,19 +77,13 @@ type Client struct { func NewClient(cacheDir string, quiet bool, opts ...Option) *Client { o := &options{ clock: clock.RealClock{}, - dbRepository: defaultDBRepository, + dbRepository: defaultRepository, } for _, opt := range opts { opt(o) } - // Add the schema version as a tag if the tag doesn't exist. - // This is required for backward compatibility. 
- if !strings.Contains(o.dbRepository, ":") { - o.dbRepository = fmt.Sprintf("%s:%d", o.dbRepository, db.SchemaVersion) - } - return &Client{ options: o, cacheDir: cacheDir, @@ -195,7 +194,7 @@ func (c *Client) initOCIArtifact(opt types.RegistryOptions) (*oci.Artifact, erro return c.artifact, nil } - art, err := oci.NewArtifact(c.dbRepository, c.quiet, opt) + art, err := oci.NewArtifact(c.dbRepository.String(), c.quiet, opt) if err != nil { var terr *transport.Error if errors.As(err, &terr) { diff --git a/pkg/dependency/parser/golang/mod/testdata/replaced-with-local-path-and-version-mismatch/go.sum b/pkg/dependency/parser/golang/mod/testdata/replaced-with-local-path-and-version-mismatch/go.sum index 3d1d7c0e3913..8a219a39d474 100644 --- a/pkg/dependency/parser/golang/mod/testdata/replaced-with-local-path-and-version-mismatch/go.sum +++ b/pkg/dependency/parser/golang/mod/testdata/replaced-with-local-path-and-version-mismatch/go.sum @@ -50,6 +50,7 @@ golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgw golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= diff --git a/pkg/dependency/parser/gradle/lockfile/parse.go b/pkg/dependency/parser/gradle/lockfile/parse.go index 3a60f3f58872..6d466570d2ff 100644 --- a/pkg/dependency/parser/gradle/lockfile/parse.go +++ b/pkg/dependency/parser/gradle/lockfile/parse.go @@ -46,6 +46,10 @@ func (Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, er EndLine: lineNum, }, }, + // There is no reliable way to determine direct dependencies (even using other files). + // Therefore, we mark all dependencies as Indirect. + // This is necessary to try to guess direct dependencies and build a dependency tree. 
+ Indirect: true, }) } diff --git a/pkg/dependency/parser/gradle/lockfile/parse_test.go b/pkg/dependency/parser/gradle/lockfile/parse_test.go index e9f76883e4e5..49cc7fe1c3a3 100644 --- a/pkg/dependency/parser/gradle/lockfile/parse_test.go +++ b/pkg/dependency/parser/gradle/lockfile/parse_test.go @@ -21,9 +21,10 @@ func TestParser_Parse(t *testing.T) { inputFile: "testdata/happy.lockfile", want: []types.Library{ { - ID: "cglib:cglib-nodep:2.1.2", - Name: "cglib:cglib-nodep", - Version: "2.1.2", + ID: "cglib:cglib-nodep:2.1.2", + Name: "cglib:cglib-nodep", + Version: "2.1.2", + Indirect: true, Locations: []types.Location{ { StartLine: 4, @@ -32,9 +33,10 @@ func TestParser_Parse(t *testing.T) { }, }, { - ID: "org.springframework:spring-asm:3.1.3.RELEASE", - Name: "org.springframework:spring-asm", - Version: "3.1.3.RELEASE", + ID: "org.springframework:spring-asm:3.1.3.RELEASE", + Name: "org.springframework:spring-asm", + Version: "3.1.3.RELEASE", + Indirect: true, Locations: []types.Location{ { StartLine: 5, @@ -43,9 +45,10 @@ func TestParser_Parse(t *testing.T) { }, }, { - ID: "org.springframework:spring-beans:5.0.5.RELEASE", - Name: "org.springframework:spring-beans", - Version: "5.0.5.RELEASE", + ID: "org.springframework:spring-beans:5.0.5.RELEASE", + Name: "org.springframework:spring-beans", + Version: "5.0.5.RELEASE", + Indirect: true, Locations: []types.Location{ { StartLine: 6, diff --git a/pkg/dependency/parser/java/pom/parse.go b/pkg/dependency/parser/java/pom/parse.go index 8abecc5df36c..955f8cfd9e33 100644 --- a/pkg/dependency/parser/java/pom/parse.go +++ b/pkg/dependency/parser/java/pom/parse.go @@ -105,10 +105,10 @@ func (p *parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, // Cache root POM p.cache.put(result.artifact, result) - return p.parseRoot(root.artifact()) + return p.parseRoot(root.artifact(), make(map[string]struct{})) } -func (p *parser) parseRoot(root artifact) ([]types.Library, []types.Dependency, error) { +func (p *parser) parseRoot(root artifact, uniqModules map[string]struct{}) ([]types.Library, []types.Dependency, error) { // Prepare a queue for dependencies queue := newArtifactQueue() @@ -132,7 +132,12 @@ func (p *parser) parseRoot(root artifact) ([]types.Library, []types.Dependency, // Modules should be handled separately so that they can have independent dependencies. // It means multi-module allows for duplicate dependencies. 
if art.Module { - moduleLibs, moduleDeps, err := p.parseRoot(art) + if _, ok := uniqModules[art.String()]; ok { + continue + } + uniqModules[art.String()] = struct{}{} + + moduleLibs, moduleDeps, err := p.parseRoot(art, uniqModules) if err != nil { return nil, nil, err } diff --git a/pkg/dependency/parser/java/pom/parse_test.go b/pkg/dependency/parser/java/pom/parse_test.go index 4123d1dde960..b73e40511507 100644 --- a/pkg/dependency/parser/java/pom/parse_test.go +++ b/pkg/dependency/parser/java/pom/parse_test.go @@ -959,6 +959,43 @@ func TestPom_Parse(t *testing.T) { }, }, }, + { + name: "Infinity loop for modules", + inputFile: filepath.Join("testdata", "modules-infinity-loop", "pom.xml"), + local: true, + want: []types.Library{ + // as module + { + ID: "org.example:module-1:2.0.0", + Name: "org.example:module-1", + Version: "2.0.0", + }, + // as dependency + { + ID: "org.example:module-1:2.0.0", + Name: "org.example:module-1", + Version: "2.0.0", + }, + { + ID: "org.example:module-2:3.0.0", + Name: "org.example:module-2", + Version: "3.0.0", + }, + { + ID: "org.example:root:1.0.0", + Name: "org.example:root", + Version: "1.0.0", + }, + }, + wantDeps: []types.Dependency{ + { + ID: "org.example:module-2:3.0.0", + DependsOn: []string{ + "org.example:module-1:2.0.0", + }, + }, + }, + }, { name: "multi module soft requirement", inputFile: filepath.Join("testdata", "multi-module-soft-requirement", "pom.xml"), diff --git a/pkg/dependency/parser/java/pom/testdata/modules-infinity-loop/module-1/module-2/pom.xml b/pkg/dependency/parser/java/pom/testdata/modules-infinity-loop/module-1/module-2/pom.xml new file mode 100644 index 000000000000..37f39009ce97 --- /dev/null +++ b/pkg/dependency/parser/java/pom/testdata/modules-infinity-loop/module-1/module-2/pom.xml @@ -0,0 +1,16 @@ + + 4.0.0 + + module-2 + org.example + 3.0.0 + + + + org.example + module-1 + 2.0.0 + + + diff --git a/pkg/dependency/parser/java/pom/testdata/modules-infinity-loop/module-1/pom.xml b/pkg/dependency/parser/java/pom/testdata/modules-infinity-loop/module-1/pom.xml new file mode 100644 index 000000000000..9952a80dc685 --- /dev/null +++ b/pkg/dependency/parser/java/pom/testdata/modules-infinity-loop/module-1/pom.xml @@ -0,0 +1,12 @@ + + 4.0.0 + + module-1 + org.example + 2.0.0 + + + module-2 + + diff --git a/pkg/dependency/parser/java/pom/testdata/modules-infinity-loop/pom.xml b/pkg/dependency/parser/java/pom/testdata/modules-infinity-loop/pom.xml new file mode 100644 index 000000000000..372fefd3fce9 --- /dev/null +++ b/pkg/dependency/parser/java/pom/testdata/modules-infinity-loop/pom.xml @@ -0,0 +1,13 @@ + + 4.0.0 + + root + org.example + 1.0.0 + + + module-1 + module-2 + + diff --git a/pkg/dependency/parser/nodejs/npm/parse.go b/pkg/dependency/parser/nodejs/npm/parse.go index e289720b89a2..b74cfa5ce2f5 100644 --- a/pkg/dependency/parser/nodejs/npm/parse.go +++ b/pkg/dependency/parser/nodejs/npm/parse.go @@ -4,6 +4,7 @@ import ( "fmt" "io" "path" + "slices" "sort" "strings" @@ -115,28 +116,42 @@ func (p *Parser) parseV2(packages map[string]Package) ([]types.Library, []types. 
EndLine: pkg.EndLine, } + var ref types.ExternalRef + if pkg.Resolved != "" { + ref = types.ExternalRef{ + Type: types.RefOther, + URL: pkg.Resolved, + } + } + + pkgIndirect := isIndirectLib(pkgPath, directDeps) + // There are cases when similar libraries use same dependencies // we need to add location for each these dependencies if savedLib, ok := libs[pkgID]; ok { + savedLib.Dev = savedLib.Dev && pkg.Dev + savedLib.Indirect = savedLib.Indirect && pkgIndirect + + if ref.URL != "" && !slices.Contains(savedLib.ExternalReferences, ref) { + savedLib.ExternalReferences = append(savedLib.ExternalReferences, ref) + sortExternalReferences(savedLib.ExternalReferences) + } + savedLib.Locations = append(savedLib.Locations, location) sort.Sort(savedLib.Locations) + libs[pkgID] = savedLib continue } lib := types.Library{ - ID: pkgID, - Name: pkgName, - Version: pkg.Version, - Indirect: isIndirectLib(pkgPath, directDeps), - Dev: pkg.Dev, - ExternalReferences: []types.ExternalRef{ - { - Type: types.RefOther, - URL: pkg.Resolved, - }, - }, - Locations: []types.Location{location}, + ID: pkgID, + Name: pkgName, + Version: pkg.Version, + Indirect: pkgIndirect, + Dev: pkg.Dev, + ExternalReferences: lo.Ternary(ref.URL != "", []types.ExternalRef{ref}, nil), + Locations: []types.Location{location}, } libs[pkgID] = lib @@ -385,3 +400,12 @@ func (t *Package) UnmarshalJSONWithMetadata(node jfather.Node) error { func packageID(name, version string) string { return dependency.ID(ftypes.Npm, name, version) } + +func sortExternalReferences(refs []types.ExternalRef) { + sort.Slice(refs, func(i, j int) bool { + if refs[i].Type != refs[j].Type { + return refs[i].Type < refs[j].Type + } + return refs[i].URL < refs[j].URL + }) +} diff --git a/pkg/dependency/parser/nodejs/npm/parse_test.go b/pkg/dependency/parser/nodejs/npm/parse_test.go index c67055a71628..786fe643dfde 100644 --- a/pkg/dependency/parser/nodejs/npm/parse_test.go +++ b/pkg/dependency/parser/nodejs/npm/parse_test.go @@ -41,6 +41,12 @@ func TestParse(t *testing.T) { want: npmV3WithWorkspaceLibs, wantDeps: npmV3WithWorkspaceDeps, }, + { + name: "lock file v3 contains same dev and non-dev dependencies", + file: "testdata/package-lock_v3_with-same-dev-and-non-dev.json", + want: npmV3WithSameDevAndNonDevLibs, + wantDeps: npmV3WithSameDevAndNonDevDeps, + }, { name: "lock version v3 with workspace and without direct deps field", file: "testdata/package-lock_v3_without_root_deps_field.json", diff --git a/pkg/dependency/parser/nodejs/npm/parse_testcase.go b/pkg/dependency/parser/nodejs/npm/parse_testcase.go index e68addd15219..29b9e63d8f1c 100644 --- a/pkg/dependency/parser/nodejs/npm/parse_testcase.go +++ b/pkg/dependency/parser/nodejs/npm/parse_testcase.go @@ -1516,4 +1516,89 @@ var ( DependsOn: []string{"debug@2.6.9"}, }, } + + npmV3WithSameDevAndNonDevLibs = []types.Library{ + { + ID: "fsevents@1.2.9", + Name: "fsevents", + Version: "1.2.9", + Dev: true, + ExternalReferences: []types.ExternalRef{ + { + Type: types.RefOther, + URL: "https://registry.npmjs.org/fsevents/-/fsevents-1.2.9.tgz", + }, + }, + Locations: []types.Location{ + { + StartLine: 18, + EndLine: 37, + }, + }, + }, + { + ID: "minimist@0.0.8", + Name: "minimist", + Version: "0.0.8", + Indirect: false, + Dev: false, + ExternalReferences: []types.ExternalRef{ + { + Type: types.RefOther, + URL: "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", + }, + }, + Locations: []types.Location{ + { + StartLine: 38, + EndLine: 43, + }, + { + StartLine: 68, + EndLine: 72, + }, + }, + }, + { + ID: 
"mkdirp@0.5.1", + Name: "mkdirp", + Version: "0.5.1", + Indirect: true, + Dev: true, + Locations: []types.Location{ + { + StartLine: 44, + EndLine: 55, + }, + }, + }, + { + ID: "node-pre-gyp@0.12.0", + Name: "node-pre-gyp", + Version: "0.12.0", + Indirect: true, + Dev: true, + Locations: []types.Location{ + { + StartLine: 56, + EndLine: 67, + }, + }, + }, + } + + npmV3WithSameDevAndNonDevDeps = []types.Dependency{ + { + ID: "fsevents@1.2.9", + DependsOn: []string{"node-pre-gyp@0.12.0"}, + }, + { + ID: "mkdirp@0.5.1", + DependsOn: []string{"minimist@0.0.8"}, + }, + { + ID: "node-pre-gyp@0.12.0", + DependsOn: []string{"mkdirp@0.5.1"}, + }, + } ) diff --git a/pkg/dependency/parser/nodejs/npm/testdata/package-lock_v3_with-same-dev-and-non-dev.json b/pkg/dependency/parser/nodejs/npm/testdata/package-lock_v3_with-same-dev-and-non-dev.json new file mode 100644 index 000000000000..4fe518b82bcd --- /dev/null +++ b/pkg/dependency/parser/nodejs/npm/testdata/package-lock_v3_with-same-dev-and-non-dev.json @@ -0,0 +1,74 @@ +{ + "name": "5139", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "5139", + "version": "1.0.0", + "license": "ISC", + "dependencies": { + "minimist": "^0.0.8" + }, + "devDependencies": { + "fsevents": "^1.2.9" + } + }, + "node_modules/fsevents": { + "version": "1.2.9", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-1.2.9.tgz", + "integrity": "sha512-oeyj2H3EjjonWcFjD5NvZNE9Rqe4UW+nQBU2HNeKw0koVLEFIhtyETyAakeAM3de7Z/SW5kcA+fZUait9EApnw==", + "bundleDependencies": [ + "node-pre-gyp" + ], + "deprecated": "The v1 package contains DANGEROUS / INSECURE binaries. Upgrade to safe fsevents v2", + "dev": true, + "hasInstallScript": true, + "os": [ + "darwin" + ], + "dependencies": { + "node-pre-gyp": "^0.12.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/fsevents/node_modules/minimist": { + "version": "0.0.8", + "dev": true, + "inBundle": true, + "license": "MIT" + }, + "node_modules/fsevents/node_modules/mkdirp": { + "version": "0.5.1", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "minimist": "0.0.8" + }, + "bin": { + "mkdirp": "bin/cmd.js" + } + }, + "node_modules/fsevents/node_modules/node-pre-gyp": { + "version": "0.12.0", + "dev": true, + "inBundle": true, + "license": "BSD-3-Clause", + "dependencies": { + "mkdirp": "^0.5.1" + }, + "bin": { + "node-pre-gyp": "bin/node-pre-gyp" + } + }, + "node_modules/minimist": { + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", + "integrity": "sha512-miQKw5Hv4NS1Psg2517mV4e4dYNaO3++hjAvLOAzKqZ61rH8NS1SK+vbfBWZ5PY/Me/bEWhUwqMghEW5Fb9T7Q==" + } + } +} diff --git a/pkg/dependency/parser/nodejs/packagejson/parse.go b/pkg/dependency/parser/nodejs/packagejson/parse.go index f4bf258f7aae..19a53679f2d0 100644 --- a/pkg/dependency/parser/nodejs/packagejson/parse.go +++ b/pkg/dependency/parser/nodejs/packagejson/parse.go @@ -5,6 +5,7 @@ import ( "io" "regexp" + "github.com/samber/lo" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/dependency" @@ -21,7 +22,7 @@ type packageJSON struct { Dependencies map[string]string `json:"dependencies"` OptionalDependencies map[string]string `json:"optionalDependencies"` DevDependencies map[string]string `json:"devDependencies"` - Workspaces []string `json:"workspaces"` + Workspaces any `json:"workspaces"` } type Package struct { @@ -65,7 +66,7 @@ func (p *Parser) Parse(r io.Reader) (Package, error) { Dependencies: pkgJSON.Dependencies, 
OptionalDependencies: pkgJSON.OptionalDependencies, DevDependencies: pkgJSON.DevDependencies, - Workspaces: pkgJSON.Workspaces, + Workspaces: parseWorkspaces(pkgJSON.Workspaces), }, nil } @@ -82,6 +83,29 @@ func parseLicense(val interface{}) string { return "" } +// parseWorkspaces returns slice of workspaces +func parseWorkspaces(val any) []string { + // Workspaces support 2 types - https://github.com/SchemaStore/schemastore/blob/d9516961f8a5b0e65a457808070147b5a866f60b/src/schemas/json/package.json#L777 + switch ws := val.(type) { + // Workspace as object (map[string][]string) + // e.g. "workspaces": {"packages": ["packages/*", "plugins/*"]}, + case map[string]interface{}: + // Take only workspaces for `packages` - https://classic.yarnpkg.com/blog/2018/02/15/nohoist/ + if pkgsWorkspaces, ok := ws["packages"]; ok { + return lo.Map(pkgsWorkspaces.([]interface{}), func(workspace interface{}, _ int) string { + return workspace.(string) + }) + } + // Workspace as string array + // e.g. "workspaces": ["packages/*", "backend"], + case []interface{}: + return lo.Map(ws, func(workspace interface{}, _ int) string { + return workspace.(string) + }) + } + return nil +} + func IsValidName(name string) bool { // Name is optional field // https://docs.npmjs.com/cli/v9/configuring-npm/package-json#name diff --git a/pkg/dependency/parser/nodejs/packagejson/parse_test.go b/pkg/dependency/parser/nodejs/packagejson/parse_test.go index 4f04cebcc1ee..97a0027d22ef 100644 --- a/pkg/dependency/parser/nodejs/packagejson/parse_test.go +++ b/pkg/dependency/parser/nodejs/packagejson/parse_test.go @@ -76,6 +76,20 @@ func TestParse(t *testing.T) { }, }, }, + { + name: "happy path - workspace as struct", + inputFile: "testdata/workspace_as_map_package.json", + want: packagejson.Package{ + Library: types.Library{ + ID: "example@1.0.0", + Name: "example", + Version: "1.0.0", + }, + Workspaces: []string{ + "packages/*", + }, + }, + }, { name: "invalid package name", inputFile: "testdata/invalid_name.json", diff --git a/pkg/dependency/parser/nodejs/packagejson/testdata/workspace_as_map_package.json b/pkg/dependency/parser/nodejs/packagejson/testdata/workspace_as_map_package.json new file mode 100644 index 000000000000..21a198e8bc6e --- /dev/null +++ b/pkg/dependency/parser/nodejs/packagejson/testdata/workspace_as_map_package.json @@ -0,0 +1,8 @@ +{ + "name": "example", + "version": "1.0.0", + "workspaces": { + "packages": ["packages/*"], + "nohoist": ["**/react-native", "**/react-native/**"] + } +} diff --git a/pkg/fanal/analyzer/analyzer_test.go b/pkg/fanal/analyzer/analyzer_test.go index 2c7284a1ae83..8fee82acf600 100644 --- a/pkg/fanal/analyzer/analyzer_test.go +++ b/pkg/fanal/analyzer/analyzer_test.go @@ -3,6 +3,7 @@ package analyzer_test import ( "context" "fmt" + "github.com/google/go-containerregistry/pkg/name" "os" "sync" "testing" @@ -12,11 +13,11 @@ import ( "golang.org/x/sync/semaphore" "golang.org/x/xerrors" - xio "github.com/aquasecurity/trivy/pkg/x/io" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/javadb" "github.com/aquasecurity/trivy/pkg/mapfs" + xio "github.com/aquasecurity/trivy/pkg/x/io" _ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/imgconf/apk" _ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/java/jar" @@ -335,15 +336,18 @@ func TestAnalyzerGroup_AnalyzeFile(t *testing.T) { FilePath: "/lib/apk/db/installed", Packages: types.Packages{ { - ID: "musl@1.1.24-r2", - Name: "musl", - Version: "1.1.24-r2", 
- SrcName: "musl", - SrcVersion: "1.1.24-r2", - Licenses: []string{"MIT"}, - Arch: "x86_64", - Digest: "sha1:cb2316a189ebee5282c4a9bd98794cc2477a74c6", - InstalledFiles: []string{"lib/libc.musl-x86_64.so.1", "lib/ld-musl-x86_64.so.1"}, + ID: "musl@1.1.24-r2", + Name: "musl", + Version: "1.1.24-r2", + SrcName: "musl", + SrcVersion: "1.1.24-r2", + Licenses: []string{"MIT"}, + Arch: "x86_64", + Digest: "sha1:cb2316a189ebee5282c4a9bd98794cc2477a74c6", + InstalledFiles: []string{ + "lib/libc.musl-x86_64.so.1", + "lib/ld-musl-x86_64.so.1", + }, }, }, }, @@ -615,7 +619,9 @@ func TestAnalyzerGroup_PostAnalyze(t *testing.T) { if tt.analyzerType == analyzer.TypeJar { // init java-trivy-db with skip update - javadb.Init("./language/java/jar/testdata", "ghcr.io/aquasecurity/trivy-java-db", true, false, types.RegistryOptions{Insecure: false}) + repo, err := name.NewTag(javadb.DefaultRepository) + require.NoError(t, err) + javadb.Init("./language/java/jar/testdata", repo, true, false, types.RegistryOptions{Insecure: false}) } ctx := context.Background() diff --git a/pkg/fanal/analyzer/fs.go b/pkg/fanal/analyzer/fs.go index d578d6e6d06d..28880b6b0339 100644 --- a/pkg/fanal/analyzer/fs.go +++ b/pkg/fanal/analyzer/fs.go @@ -55,7 +55,8 @@ func (c *CompositeFS) CopyFileToTemp(opener Opener, info os.FileInfo) (string, e return "", xerrors.Errorf("copy error: %w", err) } - if err = os.Chmod(f.Name(), info.Mode()); err != nil { + // Use 0600 instead of file permissions to avoid errors when a file uses incorrect permissions (e.g. 0044). + if err = os.Chmod(f.Name(), 0600); err != nil { return "", xerrors.Errorf("chmod error: %w", err) } diff --git a/pkg/fanal/analyzer/language/java/gradle/lockfile.go b/pkg/fanal/analyzer/language/java/gradle/lockfile.go index 55661782fb66..5dddb0b49c3c 100644 --- a/pkg/fanal/analyzer/language/java/gradle/lockfile.go +++ b/pkg/fanal/analyzer/language/java/gradle/lockfile.go @@ -2,36 +2,104 @@ package gradle import ( "context" + "fmt" + "io" + "io/fs" "os" + "sort" "strings" + "github.com/samber/lo" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/dependency/parser/gradle/lockfile" + godeptypes "github.com/aquasecurity/trivy/pkg/dependency/types" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language" "github.com/aquasecurity/trivy/pkg/fanal/types" + "github.com/aquasecurity/trivy/pkg/log" + "github.com/aquasecurity/trivy/pkg/utils/fsutils" ) func init() { - analyzer.RegisterAnalyzer(&gradleLockAnalyzer{}) + analyzer.RegisterPostAnalyzer(analyzer.TypeGradleLock, newGradleLockAnalyzer) } const ( - version = 1 + version = 2 fileNameSuffix = "gradle.lockfile" ) // gradleLockAnalyzer analyzes '*gradle.lockfile' -type gradleLockAnalyzer struct{} +type gradleLockAnalyzer struct { + parser godeptypes.Parser +} + +func newGradleLockAnalyzer(_ analyzer.AnalyzerOptions) (analyzer.PostAnalyzer, error) { + return &gradleLockAnalyzer{ + parser: lockfile.NewParser(), + }, nil +} -func (a gradleLockAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) { - p := lockfile.NewParser() - res, err := language.Analyze(types.Gradle, input.FilePath, input.Content, p) +func (a gradleLockAnalyzer) PostAnalyze(_ context.Context, input analyzer.PostAnalysisInput) (*analyzer.AnalysisResult, error) { + poms, err := parsePoms() if err != nil { - return nil, xerrors.Errorf("%s parse error: %w", input.FilePath, err) + log.Logger.Warnf("Unable to get licenses and dependsOn: %s", err) + } + + required := 
func(path string, d fs.DirEntry) bool { + return a.Required(path, nil) } - return res, nil + + var apps []types.Application + err = fsutils.WalkDir(input.FS, ".", required, func(filePath string, _ fs.DirEntry, r io.Reader) error { + var app *types.Application + app, err = language.Parse(types.Gradle, filePath, r, a.parser) + if err != nil { + return xerrors.Errorf("%s parse error: %w", filePath, err) + } + + if app == nil { + return nil + } + + libs := lo.SliceToMap(app.Libraries, func(lib types.Package) (string, struct{}) { + return lib.ID, struct{}{} + }) + + for i, lib := range app.Libraries { + pom := poms[lib.ID] + + // Fill licenses from pom file + if len(pom.Licenses.License) > 0 { + app.Libraries[i].Licenses = lo.Map(pom.Licenses.License, func(license License, _ int) string { + return license.Name + }) + } + + // Fill child deps from pom file + var deps []string + for _, dep := range pom.Dependencies.Dependency { + id := packageID(dep.GroupID, dep.ArtifactID, dep.Version) + if _, ok := libs[id]; ok { + deps = append(deps, id) + } + } + sort.Strings(deps) + app.Libraries[i].DependsOn = deps + } + + sort.Sort(app.Libraries) + apps = append(apps, *app) + return nil + }) + if err != nil { + return nil, xerrors.Errorf("walk error: %w", err) + } + + return &analyzer.AnalysisResult{ + Applications: apps, + }, nil } func (a gradleLockAnalyzer) Required(filePath string, _ os.FileInfo) bool { @@ -45,3 +113,7 @@ func (a gradleLockAnalyzer) Type() analyzer.Type { func (a gradleLockAnalyzer) Version() int { return version } + +func packageID(groupId, artifactId, ver string) string { + return fmt.Sprintf("%s:%s:%s", groupId, artifactId, ver) +} diff --git a/pkg/fanal/analyzer/language/java/gradle/lockfile_test.go b/pkg/fanal/analyzer/language/java/gradle/lockfile_test.go index e48ce885865b..b1868fecb936 100644 --- a/pkg/fanal/analyzer/language/java/gradle/lockfile_test.go +++ b/pkg/fanal/analyzer/language/java/gradle/lockfile_test.go @@ -1,6 +1,7 @@ package gradle import ( + "context" "os" "testing" @@ -13,23 +14,70 @@ import ( func Test_gradleLockAnalyzer_Analyze(t *testing.T) { tests := []struct { - name string - inputFile string - want *analyzer.AnalysisResult + name string + dir string + cacheDir string + want *analyzer.AnalysisResult }{ { - name: "happy path", - inputFile: "testdata/happy.lockfile", + name: "happy path", + dir: "testdata/lockfiles/happy", + cacheDir: "testdata/cache", + want: &analyzer.AnalysisResult{ + Applications: []types.Application{ + { + Type: types.Gradle, + FilePath: "gradle.lockfile", + Libraries: types.Packages{ + { + ID: "junit:junit:4.13", + Name: "junit:junit", + Version: "4.13", + Indirect: true, + Locations: []types.Location{ + { + StartLine: 4, + EndLine: 4, + }, + }, + Licenses: []string{ + "Eclipse Public License 1.0", + }, + DependsOn: []string{ + "org.hamcrest:hamcrest-core:1.3", + }, + }, + { + ID: "org.hamcrest:hamcrest-core:1.3", + Name: "org.hamcrest:hamcrest-core", + Version: "1.3", + Indirect: true, + Locations: []types.Location{ + { + StartLine: 5, + EndLine: 5, + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "happy path without cache", + dir: "testdata/lockfiles/happy", want: &analyzer.AnalysisResult{ Applications: []types.Application{ { Type: types.Gradle, - FilePath: "testdata/happy.lockfile", + FilePath: "gradle.lockfile", Libraries: types.Packages{ { - ID: "com.example:example:0.0.1", - Name: "com.example:example", - Version: "0.0.1", + ID: "junit:junit:4.13", + Name: "junit:junit", + Version: "4.13", + Indirect: true, Locations:
[]types.Location{ { StartLine: 4, @@ -37,30 +85,41 @@ func Test_gradleLockAnalyzer_Analyze(t *testing.T) { }, }, }, + { + ID: "org.hamcrest:hamcrest-core:1.3", + Name: "org.hamcrest:hamcrest-core", + Version: "1.3", + Indirect: true, + Locations: []types.Location{ + { + StartLine: 5, + EndLine: 5, + }, + }, + }, }, }, }, }, }, { - name: "empty file", - inputFile: "testdata/empty.lockfile", + name: "empty file", + dir: "testdata/lockfiles/empty", + want: &analyzer.AnalysisResult{}, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - f, err := os.Open(tt.inputFile) + if tt.cacheDir != "" { + t.Setenv("GRADLE_USER_HOME", tt.cacheDir) + } + + a, err := newGradleLockAnalyzer(analyzer.AnalyzerOptions{}) require.NoError(t, err) - defer func() { - err = f.Close() - assert.NoError(t, err) - }() - a := gradleLockAnalyzer{} - got, err := a.Analyze(nil, analyzer.AnalysisInput{ - FilePath: tt.inputFile, - Content: f, + got, err := a.PostAnalyze(context.Background(), analyzer.PostAnalysisInput{ + FS: os.DirFS(tt.dir), }) assert.NoError(t, err) diff --git a/pkg/fanal/analyzer/language/java/gradle/pom.go b/pkg/fanal/analyzer/language/java/gradle/pom.go new file mode 100644 index 000000000000..638b5c9fd61b --- /dev/null +++ b/pkg/fanal/analyzer/language/java/gradle/pom.go @@ -0,0 +1,166 @@ +package gradle + +import ( + "encoding/xml" + "io" + "io/fs" + "os" + "path/filepath" + "runtime" + "strings" + + "golang.org/x/net/html/charset" + "golang.org/x/xerrors" + + "github.com/aquasecurity/trivy/pkg/fanal/log" + "github.com/aquasecurity/trivy/pkg/utils/fsutils" +) + +type pomXML struct { + GroupId string `xml:"groupId"` + ArtifactId string `xml:"artifactId"` + Version string `xml:"version"` + Properties Properties `xml:"properties"` + Dependencies Dependencies `xml:"dependencies"` + Licenses Licenses `xml:"licenses"` +} +type Dependencies struct { + Dependency []Dependency `xml:"dependency"` +} + +type Dependency struct { + GroupID string `xml:"groupId"` + ArtifactID string `xml:"artifactId"` + Version string `xml:"version"` +} + +type Licenses struct { + License []License `xml:"license"` +} + +type License struct { + Name string `xml:"name"` +} + +type Properties map[string]string + +type property struct { + XMLName xml.Name + Value string `xml:",chardata"` +} + +func (props *Properties) UnmarshalXML(d *xml.Decoder, _ xml.StartElement) error { + *props = Properties{} + for { + var p property + err := d.Decode(&p) + if err == io.EOF { + break + } else if err != nil { + return xerrors.Errorf("XML decode error: %w", err) + } + + (*props)[p.XMLName.Local] = p.Value + } + return nil +} + +func parsePoms() (map[string]pomXML, error) { + cacheDir := detectCacheDir() + // Cache dir is not found + if cacheDir == "" { + return nil, nil + } + + required := func(path string, d fs.DirEntry) bool { + return filepath.Ext(path) == ".pom" + } + + var poms = make(map[string]pomXML) + err := fsutils.WalkDir(os.DirFS(cacheDir), ".", required, func(path string, _ fs.DirEntry, r io.Reader) error { + pom, err := parsePom(r, path) + if err != nil { + log.Logger.Debugf("Unable to parse %q: %s", path, err) + return nil + } + + if pom.ArtifactId != "" { + poms[packageID(pom.GroupId, pom.ArtifactId, pom.Version)] = pom + } + return nil + }) + if err != nil { + return nil, xerrors.Errorf("gradle licenses walk error: %w", err) + } + + return poms, nil +} + +func parsePom(r io.Reader, path string) (pomXML, error) { + pom := pomXML{} + decoder := xml.NewDecoder(r) + decoder.CharsetReader = charset.NewReaderLabel + if err 
:= decoder.Decode(&pom); err != nil { + return pomXML{}, xerrors.Errorf("xml decode error: %w", err) + } + + // We only need poms with licenses or dependencies + if len(pom.Licenses.License) == 0 && len(pom.Dependencies.Dependency) == 0 { + return pomXML{}, nil + } + + // If the pom file doesn't contain GroupID or Version: + // find these values from the file path + // e.g. caches/modules-2/files-2.1/com.google.code.gson/gson/2.9.1/f0cf3edcef8dcb74d27cb427544a309eb718d772/gson-2.9.1.pom + dirs := strings.Split(filepath.ToSlash(path), "/") + if pom.GroupId == "" { + pom.GroupId = dirs[len(dirs)-5] + } + if pom.Version == "" { + pom.Version = dirs[len(dirs)-3] + } + + if err := pom.resolveDependencyVersions(); err != nil { + return pomXML{}, xerrors.Errorf("unable to resolve dependency version: %w", err) + } + + return pom, nil +} + +// resolveDependencyVersions resolves versions from properties +func (pom *pomXML) resolveDependencyVersions() error { + for i, dep := range pom.Dependencies.Dependency { + if strings.HasPrefix(dep.Version, "${") && strings.HasSuffix(dep.Version, "}") { + dep.Version = strings.TrimPrefix(strings.TrimSuffix(dep.Version, "}"), "${") + if resolvedVer, ok := pom.Properties[dep.Version]; ok { + pom.Dependencies.Dependency[i].Version = resolvedVer + } else if dep.Version == "${project.version}" { + pom.Dependencies.Dependency[i].Version = dep.Version + } else { + // We use simplified logic to resolve properties. + // If necessary, update and use the logic for Maven poms + return xerrors.Errorf("Unable to resolve %q version. Please open a new discussion to update the Trivy logic.", dep.Version) + } + } + } + return nil +} + +func detectCacheDir() string { + // https://docs.gradle.org/current/userguide/directory_layout.html + dir := os.Getenv("GRADLE_USER_HOME") + if dir == "" { + if runtime.GOOS == "windows" { + dir = filepath.Join(os.Getenv("%HOMEPATH%"), ".gradle") + } else { + dir = filepath.Join(os.Getenv("HOME"), ".gradle") + } + } + dir = filepath.Join(dir, "caches") + + if !fsutils.DirExists(dir) { + log.Logger.Debug("Unable to get licenses and dependsOn. Gradle cache dir doesn't exist.") + return "" + } + return dir +} diff --git a/pkg/fanal/analyzer/language/java/gradle/pom_test.go b/pkg/fanal/analyzer/language/java/gradle/pom_test.go new file mode 100644 index 000000000000..4ca85c647e2e --- /dev/null +++ b/pkg/fanal/analyzer/language/java/gradle/pom_test.go @@ -0,0 +1,101 @@ +package gradle + +import ( + "github.com/stretchr/testify/require" + "os" + "path/filepath" + "testing" +) + +func Test_parsePom(t *testing.T) { + tests := []struct { + name string + inputFile string + inputPath string + want pomXML + }{ + { + name: "happy path", + inputFile: filepath.Join("testdata", "poms", "happy.pom"), + inputPath: "cache/caches/modules-2/files-2.1/org.example/example-core/1.0/872e413497b906e7c9fa85ccc96046c5d1ef7ece/example-core-1.0.pom", + want: pomXML{ + GroupId: "org.example", + ArtifactId: "example-core", + Version: "1.0.0", + Licenses: Licenses{ + License: []License{ + { + Name: "Apache License, Version 2.0", + }, + }, + }, + Dependencies: Dependencies{ + Dependency: []Dependency{ + { + GroupID: "org.example", + ArtifactID: "example-api", + Version: "2.0.0", + }, + }, + }, + }, + }, + { + name: "happy path.
Take GroupID and Version from path", + inputFile: filepath.Join("testdata", "poms", "without-groupid-and-version.pom"), + inputPath: "cache/caches/modules-2/files-2.1/org.example/example-core/1.0.0/872e413497b906e7c9fa85ccc96046c5d1ef7ece/example-core-1.0.pom", + want: pomXML{ + GroupId: "org.example", + ArtifactId: "example-core", + Version: "1.0.0", + Licenses: Licenses{ + License: []License{ + { + Name: "Apache License, Version 2.0", + }, + }, + }, + }, + }, + { + name: "happy path. Dependency version as property.", + inputFile: filepath.Join("testdata", "poms", "dep-version-as-property.pom"), + inputPath: "cache/caches/modules-2/files-2.1/org.example/example-core/1.0.0/872e413497b906e7c9fa85ccc96046c5d1ef7ece/example-core-1.0.pom", + want: pomXML{ + GroupId: "org.example", + ArtifactId: "example-core", + Version: "1.0.0", + Properties: Properties{ + "coreVersion": "2.0.1", + }, + Dependencies: Dependencies{ + Dependency: []Dependency{ + { + GroupID: "org.example", + ArtifactID: "example-api", + Version: "2.0.1", + }, + }, + }, + }, + }, + { + name: "happy path. Dependency version as property.", + inputFile: filepath.Join("testdata", "poms", "without-licenses-and-deps.pom"), + inputPath: "cache/caches/modules-2/files-2.1/org.example/example-core/1.0.0/872e413497b906e7c9fa85ccc96046c5d1ef7ece/example-core-1.0.pom", + want: pomXML{}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + f, err := os.Open(tt.inputFile) + require.NoError(t, err) + + pom, err := parsePom(f, tt.inputPath) + require.NoError(t, err) + + require.Equal(t, tt.want, pom) + }) + } +} diff --git a/pkg/fanal/analyzer/language/java/gradle/testdata/cache/caches/modules-2/files-2.1/junit/junit/4.13/5c17760663fae422643fc859fd352c68a1d91bfc/junit-4.13.pom b/pkg/fanal/analyzer/language/java/gradle/testdata/cache/caches/modules-2/files-2.1/junit/junit/4.13/5c17760663fae422643fc859fd352c68a1d91bfc/junit-4.13.pom new file mode 100644 index 000000000000..40d49278c416 --- /dev/null +++ b/pkg/fanal/analyzer/language/java/gradle/testdata/cache/caches/modules-2/files-2.1/junit/junit/4.13/5c17760663fae422643fc859fd352c68a1d91bfc/junit-4.13.pom @@ -0,0 +1,587 @@ + + + 4.0.0 + + junit + junit + 4.13 + + JUnit + JUnit is a unit testing framework for Java, created by Erich Gamma and Kent Beck. 
+ http://junit.org + 2002 + + JUnit + http://www.junit.org + + + + Eclipse Public License 1.0 + http://www.eclipse.org/legal/epl-v10.html + repo + + + + + + dsaff + David Saff + david@saff.net + + + kcooney + Kevin Cooney + kcooney@google.com + + + stefanbirkner + Stefan Birkner + mail@stefan-birkner.de + + + marcphilipp + Marc Philipp + mail@marcphilipp.de + + + + + JUnit contributors + JUnit + team@junit.org + https://github.com/junit-team/junit4/graphs/contributors + + developers + + + + + + 3.0.4 + + + + scm:git:git://github.com/junit-team/junit4.git + scm:git:git@github.com:junit-team/junit4.git + http://github.com/junit-team/junit4/tree/master + r4.13 + + + github + https://github.com/junit-team/junit4/issues + + + travis + https://travis-ci.org/junit-team/junit4 + + + https://github.com/junit-team/junit4/wiki/Download-and-Install + + junit-snapshot-repo + Nexus Snapshot Repository + https://oss.sonatype.org/content/repositories/snapshots/ + + + junit-releases-repo + Nexus Release Repository + https://oss.sonatype.org/service/local/staging/deploy/maven2/ + + + junit.github.io + gitsite:git@github.com/junit-team/junit4.git + + + + + 1.5 + 2.19.1 + 1.3 + ISO-8859-1 + + 67893CC4 + + + + + org.hamcrest + hamcrest-core + ${hamcrestVersion} + + + + org.hamcrest + hamcrest-library + ${hamcrestVersion} + test + + + + + + + ${project.basedir}/src/main/resources + + + ${project.basedir} + + LICENSE-junit.txt + + + + + + + + maven-enforcer-plugin + 1.4 + + + enforce-versions + initialize + + enforce + + + true + + + + Current version of Maven ${maven.version} required to build the project + should be ${project.prerequisites.maven}, or higher! + + [${project.prerequisites.maven},) + + + Current JDK version ${java.version} should be ${jdkVersion}, or higher! + + ${jdkVersion} + + + Best Practice is to never define repositories in pom.xml (use a repository + manager instead). + + + + No Snapshots Dependencies Allowed! 
+ + + + + + + + + com.google.code.maven-replacer-plugin + replacer + 1.5.3 + + + process-sources + + replace + + + + + false + ${project.build.sourceDirectory}/junit/runner/Version.java.template + ${project.build.sourceDirectory}/junit/runner/Version.java + false + @version@ + ${project.version} + + + + + maven-compiler-plugin + 3.3 + + ${project.build.sourceEncoding} + ${jdkVersion} + ${jdkVersion} + ${jdkVersion} + ${jdkVersion} + 1.5 + true + true + true + true + + -Xlint:unchecked + + 128m + + + + org.codehaus.mojo + animal-sniffer-maven-plugin + 1.14 + + + signature-check + test + + check + + + + org.codehaus.mojo.signature + java15 + 1.0 + + + + + + + + maven-surefire-plugin + ${surefireVersion} + + org/junit/tests/AllTests.java + true + false + + + + org.apache.maven.surefire + surefire-junit47 + ${surefireVersion} + + + + + + maven-source-plugin + 2.4 + + + + maven-javadoc-plugin + 2.10.3 + + ${basedir}/src/main/javadoc/stylesheet.css + protected + false + false + false + true + true + true + JUnit API + UTF-8 + en + ${jdkVersion} + + + api_${jdkVersion} + http://docs.oracle.com/javase/${jdkVersion}.0/docs/api/ + + + *.internal.* + true + 32m + 128m + true + true + + org.hamcrest:hamcrest-core:* + + + + + maven-release-plugin + 2.5.2 + + forked-path + false + -Pgenerate-docs,junit-release ${arguments} + r@{project.version} + + + + maven-site-plugin + 3.4 + + + com.github.stephenc.wagon + wagon-gitsite + 0.4.1 + + + org.apache.maven.doxia + doxia-module-markdown + 1.5 + + + + + maven-jar-plugin + 2.6 + + + false + + true + + + junit + + + + + + maven-clean-plugin + 2.6.1 + + + maven-deploy-plugin + 2.8.2 + + + maven-install-plugin + 2.5.2 + + + maven-resources-plugin + 2.7 + + + + + + + + maven-project-info-reports-plugin + 2.8 + + false + + + + + + index + dependency-info + modules + license + project-team + scm + issue-tracking + mailing-list + dependency-management + dependencies + dependency-convergence + cim + distribution-management + + + + + + maven-javadoc-plugin + 2.10.3 + + javadoc/latest + ${basedir}/src/main/javadoc/stylesheet.css + protected + false + false + false + true + true + true + JUnit API + UTF-8 + en + ${jdkVersion} + + + api_${jdkVersion} + http://docs.oracle.com/javase/${jdkVersion}.0/docs/api/ + + + junit.*,*.internal.* + true + 32m + 128m + true + true + + org.hamcrest:hamcrest-core:* + + + + + + javadoc + + + + + + + + + + junit-release + + + + + + maven-gpg-plugin + 1.6 + + + gpg-sign + verify + + sign + + + + + + + + + generate-docs + + + + + maven-source-plugin + + + attach-sources + prepare-package + + jar-no-fork + + + + + + maven-javadoc-plugin + + + attach-javadoc + package + + jar + + + + + + + + + restrict-doclint + + + [1.8,) + + + + + maven-compiler-plugin + + + -Xlint:unchecked + -Xdoclint:accessibility,reference,syntax + + + + + maven-javadoc-plugin + + -Xdoclint:accessibility -Xdoclint:reference + + + + + + + + maven-javadoc-plugin + + -Xdoclint:accessibility -Xdoclint:reference + + + + + + + java9 + + [1.9,) + + + + 1.6 + + + + + maven-javadoc-plugin + + 1.6 + + + + + + + + maven-javadoc-plugin + + 1.6 + + + + + + + diff --git a/pkg/fanal/analyzer/language/java/gradle/testdata/cache/caches/modules-2/files-2.1/org.hamcrest/hamcrest-core/1.3/872e413497b906e7c9fa85ccc96046c5d1ef7ece/hamcrest-core-1.3.pom b/pkg/fanal/analyzer/language/java/gradle/testdata/cache/caches/modules-2/files-2.1/org.hamcrest/hamcrest-core/1.3/872e413497b906e7c9fa85ccc96046c5d1ef7ece/hamcrest-core-1.3.pom new file mode 100644 index 000000000000..0721781c99a0 --- 
/dev/null +++ b/pkg/fanal/analyzer/language/java/gradle/testdata/cache/caches/modules-2/files-2.1/org.hamcrest/hamcrest-core/1.3/872e413497b906e7c9fa85ccc96046c5d1ef7ece/hamcrest-core-1.3.pom @@ -0,0 +1,18 @@ + + + 4.0.0 + + + org.hamcrest + hamcrest-parent + 1.3 + + + hamcrest-core + jar + Hamcrest Core + + This is the core API of hamcrest matcher framework to be used by third-party framework providers. This includes the a foundation set of matcher implementations for common operations. + + diff --git a/pkg/fanal/analyzer/language/java/gradle/testdata/happy.lockfile b/pkg/fanal/analyzer/language/java/gradle/testdata/happy.lockfile deleted file mode 100644 index 3b965af31665..000000000000 --- a/pkg/fanal/analyzer/language/java/gradle/testdata/happy.lockfile +++ /dev/null @@ -1,5 +0,0 @@ -# This is a Gradle generated file for dependency locking. -# Manual edits can break the build and are not advised. -# This file is expected to be part of source control. -com.example:example:0.0.1=classpath -empty= \ No newline at end of file diff --git a/pkg/fanal/analyzer/language/java/gradle/testdata/empty.lockfile b/pkg/fanal/analyzer/language/java/gradle/testdata/lockfiles/empty/gradle.lockfile similarity index 100% rename from pkg/fanal/analyzer/language/java/gradle/testdata/empty.lockfile rename to pkg/fanal/analyzer/language/java/gradle/testdata/lockfiles/empty/gradle.lockfile diff --git a/pkg/fanal/analyzer/language/java/gradle/testdata/lockfiles/happy/gradle.lockfile b/pkg/fanal/analyzer/language/java/gradle/testdata/lockfiles/happy/gradle.lockfile new file mode 100644 index 000000000000..957bb968cc8f --- /dev/null +++ b/pkg/fanal/analyzer/language/java/gradle/testdata/lockfiles/happy/gradle.lockfile @@ -0,0 +1,6 @@ +# This is a Gradle generated file for dependency locking. +# Manual edits can break the build and are not advised. +# This file is expected to be part of source control. 
+junit:junit:4.13=compileClasspath,runtimeClasspath,testCompileClasspath,testRuntimeClasspath +org.hamcrest:hamcrest-core:1.3=compileClasspath,runtimeClasspath,testCompileClasspath,testRuntimeClasspath +empty=annotationProcessor,testAnnotationProcessor \ No newline at end of file diff --git a/pkg/fanal/analyzer/language/java/gradle/testdata/poms/dep-version-as-property.pom b/pkg/fanal/analyzer/language/java/gradle/testdata/poms/dep-version-as-property.pom new file mode 100644 index 000000000000..7b2cd75b39f1 --- /dev/null +++ b/pkg/fanal/analyzer/language/java/gradle/testdata/poms/dep-version-as-property.pom @@ -0,0 +1,21 @@ + + + 4.0.0 + + org.example + example-core + 1.0.0 + + + 2.0.1 + + + + + org.example + example-api + ${coreVersion} + + + diff --git a/pkg/fanal/analyzer/language/java/gradle/testdata/poms/happy.pom b/pkg/fanal/analyzer/language/java/gradle/testdata/poms/happy.pom new file mode 100644 index 000000000000..896fb1df5981 --- /dev/null +++ b/pkg/fanal/analyzer/language/java/gradle/testdata/poms/happy.pom @@ -0,0 +1,23 @@ + + + 4.0.0 + + org.example + example-core + 1.0.0 + + + + Apache License, Version 2.0 + + + + + + org.example + example-api + 2.0.0 + + + diff --git a/pkg/fanal/analyzer/language/java/gradle/testdata/poms/without-groupid-and-version.pom b/pkg/fanal/analyzer/language/java/gradle/testdata/poms/without-groupid-and-version.pom new file mode 100644 index 000000000000..e94fcbaaaca2 --- /dev/null +++ b/pkg/fanal/analyzer/language/java/gradle/testdata/poms/without-groupid-and-version.pom @@ -0,0 +1,19 @@ + + + 4.0.0 + + + org.example + example-parent + 1.3 + + + example-core + + + + Apache License, Version 2.0 + + + diff --git a/pkg/fanal/analyzer/language/java/gradle/testdata/poms/without-licenses-and-deps.pom b/pkg/fanal/analyzer/language/java/gradle/testdata/poms/without-licenses-and-deps.pom new file mode 100644 index 000000000000..5c83a401353d --- /dev/null +++ b/pkg/fanal/analyzer/language/java/gradle/testdata/poms/without-licenses-and-deps.pom @@ -0,0 +1,10 @@ + + + 4.0.0 + + org.example + example-core + 1.0.0 + + diff --git a/pkg/fanal/analyzer/language/java/jar/jar_test.go b/pkg/fanal/analyzer/language/java/jar/jar_test.go index 133ead426d7a..3988dc27daf5 100644 --- a/pkg/fanal/analyzer/language/java/jar/jar_test.go +++ b/pkg/fanal/analyzer/language/java/jar/jar_test.go @@ -2,6 +2,8 @@ package jar import ( "context" + "github.com/google/go-containerregistry/pkg/name" + "github.com/stretchr/testify/require" "os" "path/filepath" "testing" @@ -130,13 +132,15 @@ func Test_javaLibraryAnalyzer_Analyze(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { // init java-trivy-db with skip update - javadb.Init("testdata", defaultJavaDBRepository, true, false, types.RegistryOptions{Insecure: false}) + repo, err := name.NewTag(javadb.DefaultRepository) + require.NoError(t, err) + javadb.Init("testdata", repo, true, false, types.RegistryOptions{Insecure: false}) a := javaLibraryAnalyzer{} ctx := context.Background() mfs := mapfs.New() - err := mfs.MkdirAll(filepath.Dir(tt.inputFile), os.ModePerm) + err = mfs.MkdirAll(filepath.Dir(tt.inputFile), os.ModePerm) assert.NoError(t, err) err = mfs.WriteFile(tt.inputFile, tt.inputFile) assert.NoError(t, err) diff --git a/pkg/fanal/analyzer/pkg/dpkg/scanner.go b/pkg/fanal/analyzer/pkg/dpkg/scanner.go index de5c39a6a174..2e38f06b0cf7 100644 --- a/pkg/fanal/analyzer/pkg/dpkg/scanner.go +++ b/pkg/fanal/analyzer/pkg/dpkg/scanner.go @@ -15,6 +15,11 @@ type dpkgScanner struct { // NewScanner returns a new 
scanner that splits on empty lines. func NewScanner(r io.Reader) *dpkgScanner { s := bufio.NewScanner(r) + // Package data may exceed default buffer size + // Increase the buffer default size by 2 times + buf := make([]byte, 0, 128*1024) + s.Buffer(buf, 128*1024) + s.Split(emptyLineSplit) return &dpkgScanner{Scanner: s} } diff --git a/pkg/fanal/analyzer/sbom/sbom_test.go b/pkg/fanal/analyzer/sbom/sbom_test.go index c6f5b4b33701..3bcb619d402b 100644 --- a/pkg/fanal/analyzer/sbom/sbom_test.go +++ b/pkg/fanal/analyzer/sbom/sbom_test.go @@ -31,6 +31,7 @@ func Test_sbomAnalyzer_Analyze(t *testing.T) { Type: types.Jar, Libraries: types.Packages{ { + ID: "co.elastic.apm:apm-agent:1.36.0", Name: "co.elastic.apm:apm-agent", Version: "1.36.0", FilePath: "opt/bitnami/elasticsearch", @@ -44,6 +45,7 @@ func Test_sbomAnalyzer_Analyze(t *testing.T) { }, }, { + ID: "co.elastic.apm:apm-agent-cached-lookup-key:1.36.0", Name: "co.elastic.apm:apm-agent-cached-lookup-key", Version: "1.36.0", FilePath: "opt/bitnami/elasticsearch", @@ -57,6 +59,7 @@ func Test_sbomAnalyzer_Analyze(t *testing.T) { }, }, { + ID: "co.elastic.apm:apm-agent-common:1.36.0", Name: "co.elastic.apm:apm-agent-common", Version: "1.36.0", FilePath: "opt/bitnami/elasticsearch", @@ -70,6 +73,7 @@ func Test_sbomAnalyzer_Analyze(t *testing.T) { }, }, { + ID: "co.elastic.apm:apm-agent-core:1.36.0", Name: "co.elastic.apm:apm-agent-core", Version: "1.36.0", FilePath: "opt/bitnami/elasticsearch", @@ -89,7 +93,8 @@ func Test_sbomAnalyzer_Analyze(t *testing.T) { FilePath: "opt/bitnami/elasticsearch", Libraries: types.Packages{ { - Name: "elasticsearch", + ID: "Elasticsearch@8.9.1", + Name: "Elasticsearch", Version: "8.9.1", Arch: "arm64", Licenses: []string{"Elastic-2.0"}, @@ -169,7 +174,8 @@ func Test_sbomAnalyzer_Analyze(t *testing.T) { FilePath: "opt/bitnami/postgresql", Libraries: types.Packages{ { - Name: "gdal", + ID: "GDAL@3.7.1", + Name: "GDAL", Version: "3.7.1", Licenses: []string{"MIT"}, Identifier: types.PkgIdentifier{ @@ -181,7 +187,8 @@ func Test_sbomAnalyzer_Analyze(t *testing.T) { }, }, { - Name: "geos", + ID: "GEOS@3.8.3", + Name: "GEOS", Version: "3.8.3", Licenses: []string{"LGPL-2.1-only"}, Identifier: types.PkgIdentifier{ @@ -193,7 +200,8 @@ func Test_sbomAnalyzer_Analyze(t *testing.T) { }, }, { - Name: "postgresql", + ID: "PostgreSQL@15.3.0", + Name: "PostgreSQL", Version: "15.3.0", Licenses: []string{"PostgreSQL"}, Identifier: types.PkgIdentifier{ @@ -203,9 +211,15 @@ func Test_sbomAnalyzer_Analyze(t *testing.T) { Version: "15.3.0", }, }, + DependsOn: []string{ + "GEOS@3.8.3", + "Proj@6.3.2", + "GDAL@3.7.1", + }, }, { - Name: "proj", + ID: "Proj@6.3.2", + Name: "Proj", Version: "6.3.2", Licenses: []string{"MIT"}, Identifier: types.PkgIdentifier{ diff --git a/pkg/fanal/applier/docker.go b/pkg/fanal/applier/docker.go index 730737e8a370..abcc1ce51958 100644 --- a/pkg/fanal/applier/docker.go +++ b/pkg/fanal/applier/docker.go @@ -263,12 +263,9 @@ func newPURL(pkgType ftypes.TargetType, metadata types.Metadata, pkg ftypes.Pack func aggregate(detail *ftypes.ArtifactDetail) { var apps []ftypes.Application - aggregatedApps := map[ftypes.LangType]*ftypes.Application{ - ftypes.PythonPkg: {Type: ftypes.PythonPkg}, - ftypes.CondaPkg: {Type: ftypes.CondaPkg}, - ftypes.GemSpec: {Type: ftypes.GemSpec}, - ftypes.NodePkg: {Type: ftypes.NodePkg}, - ftypes.Jar: {Type: ftypes.Jar}, + aggregatedApps := make(map[ftypes.LangType]*ftypes.Application) + for _, t := range ftypes.AggregatingTypes { + aggregatedApps[t] = &ftypes.Application{Type: t} } for _, 
app := range detail.Applications { diff --git a/pkg/fanal/types/const.go b/pkg/fanal/types/const.go index 115850f43978..b46b36a8d425 100644 --- a/pkg/fanal/types/const.go +++ b/pkg/fanal/types/const.go @@ -81,6 +81,14 @@ const ( OCP LangType = "ocp" // Red Hat OpenShift Container Platform ) +var AggregatingTypes = []LangType{ + PythonPkg, + CondaPkg, + GemSpec, + NodePkg, + Jar, +} + // Config files const ( JSON ConfigType = "json" diff --git a/pkg/flag/db_flags.go b/pkg/flag/db_flags.go index 7e018e865a77..58e7809a2152 100644 --- a/pkg/flag/db_flags.go +++ b/pkg/flag/db_flags.go @@ -1,14 +1,17 @@ package flag import ( + "fmt" + + "github.com/google/go-containerregistry/pkg/name" + "go.uber.org/zap" "golang.org/x/xerrors" + "github.com/aquasecurity/trivy/pkg/db" + "github.com/aquasecurity/trivy/pkg/javadb" "github.com/aquasecurity/trivy/pkg/log" ) -const defaultDBRepository = "ghcr.io/aquasecurity/trivy-db:2" -const defaultJavaDBRepository = "ghcr.io/aquasecurity/trivy-java-db:1" - var ( ResetFlag = Flag[bool]{ Name: "reset", @@ -49,13 +52,13 @@ var ( DBRepositoryFlag = Flag[string]{ Name: "db-repository", ConfigName: "db.repository", - Default: defaultDBRepository, + Default: db.DefaultRepository, Usage: "OCI repository to retrieve trivy-db from", } JavaDBRepositoryFlag = Flag[string]{ Name: "java-db-repository", ConfigName: "db.java-repository", - Default: defaultJavaDBRepository, + Default: javadb.DefaultRepository, Usage: "OCI repository to retrieve trivy-java-db from", } LightFlag = Flag[bool]{ @@ -86,8 +89,8 @@ type DBOptions struct { DownloadJavaDBOnly bool SkipJavaDBUpdate bool NoProgress bool - DBRepository string - JavaDBRepository string + DBRepository name.Reference + JavaDBRepository name.Reference Light bool // deprecated } @@ -145,6 +148,32 @@ func (f *DBFlagGroup) ToOptions() (DBOptions, error) { log.Logger.Warn("'--light' option is deprecated and will be removed. See also: https://github.com/aquasecurity/trivy/discussions/1649") } + var dbRepository, javaDBRepository name.Reference + var err error + if f.DBRepository != nil { + if dbRepository, err = name.ParseReference(f.DBRepository.Value(), name.WithDefaultTag("")); err != nil { + return DBOptions{}, xerrors.Errorf("invalid db repository: %w", err) + } + // Add the schema version if the tag is not specified for backward compatibility. + if t, ok := dbRepository.(name.Tag); ok && t.TagStr() == "" { + dbRepository = t.Tag(fmt.Sprint(db.SchemaVersion)) + log.Logger.Infow("Adding schema version to the DB repository for backward compatibility", + zap.String("repository", dbRepository.String())) + } + } + + if f.JavaDBRepository != nil { + if javaDBRepository, err = name.ParseReference(f.JavaDBRepository.Value(), name.WithDefaultTag("")); err != nil { + return DBOptions{}, xerrors.Errorf("invalid javadb repository: %w", err) + } + // Add the schema version if the tag is not specified for backward compatibility. 
+ if t, ok := javaDBRepository.(name.Tag); ok && t.TagStr() == "" { + javaDBRepository = t.Tag(fmt.Sprint(javadb.SchemaVersion)) + log.Logger.Infow("Adding schema version to the Java DB repository for backward compatibility", + zap.String("repository", javaDBRepository.String())) + } + } + return DBOptions{ Reset: f.Reset.Value(), DownloadDBOnly: downloadDBOnly, @@ -153,7 +182,7 @@ func (f *DBFlagGroup) ToOptions() (DBOptions, error) { SkipJavaDBUpdate: skipJavaDBUpdate, Light: light, NoProgress: f.NoProgress.Value(), - DBRepository: f.DBRepository.Value(), - JavaDBRepository: f.JavaDBRepository.Value(), + DBRepository: dbRepository, + JavaDBRepository: javaDBRepository, }, nil } diff --git a/pkg/flag/db_flags_test.go b/pkg/flag/db_flags_test.go index c590ed49f7a3..b53f29135d74 100644 --- a/pkg/flag/db_flags_test.go +++ b/pkg/flag/db_flags_test.go @@ -1,6 +1,7 @@ package flag_test import ( + "github.com/google/go-containerregistry/pkg/name" "testing" "github.com/spf13/viper" @@ -15,9 +16,11 @@ import ( func TestDBFlagGroup_ToOptions(t *testing.T) { type fields struct { - SkipDBUpdate bool - DownloadDBOnly bool - Light bool + SkipDBUpdate bool + DownloadDBOnly bool + Light bool + DBRepository string + JavaDBRepository string } tests := []struct { name string @@ -29,22 +32,30 @@ func TestDBFlagGroup_ToOptions(t *testing.T) { { name: "happy", fields: fields{ - SkipDBUpdate: true, - DownloadDBOnly: false, + SkipDBUpdate: true, + DownloadDBOnly: false, + DBRepository: "ghcr.io/aquasecurity/trivy-db", + JavaDBRepository: "ghcr.io/aquasecurity/trivy-java-db", }, want: flag.DBOptions{ - SkipDBUpdate: true, - DownloadDBOnly: false, + SkipDBUpdate: true, + DownloadDBOnly: false, + DBRepository: name.Tag{}, // All fields are unexported + JavaDBRepository: name.Tag{}, // All fields are unexported }, assertion: require.NoError, }, { name: "light", fields: fields{ - Light: true, + Light: true, + DBRepository: "ghcr.io/aquasecurity/trivy-db", + JavaDBRepository: "ghcr.io/aquasecurity/trivy-java-db", }, want: flag.DBOptions{ - Light: true, + Light: true, + DBRepository: name.Tag{}, // All fields are unexported + JavaDBRepository: name.Tag{}, // All fields are unexported }, wantLogs: []string{ "'--light' option is deprecated and will be removed. 
See also: https://github.com/aquasecurity/trivy/discussions/1649", @@ -61,6 +72,17 @@ func TestDBFlagGroup_ToOptions(t *testing.T) { require.ErrorContains(t, err, "--skip-db-update and --download-db-only options can not be specified both") }, }, + { + name: "invalid repo", + fields: fields{ + SkipDBUpdate: true, + DownloadDBOnly: false, + DBRepository: "foo:bar:baz", + }, + assertion: func(t require.TestingT, err error, msgs ...interface{}) { + require.ErrorContains(t, err, "invalid db repository") + }, + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { @@ -71,16 +93,20 @@ func TestDBFlagGroup_ToOptions(t *testing.T) { viper.Set(flag.SkipDBUpdateFlag.ConfigName, tt.fields.SkipDBUpdate) viper.Set(flag.DownloadDBOnlyFlag.ConfigName, tt.fields.DownloadDBOnly) viper.Set(flag.LightFlag.ConfigName, tt.fields.Light) + viper.Set(flag.DBRepositoryFlag.ConfigName, tt.fields.DBRepository) + viper.Set(flag.JavaDBRepositoryFlag.ConfigName, tt.fields.JavaDBRepository) // Assert options f := &flag.DBFlagGroup{ - DownloadDBOnly: flag.DownloadDBOnlyFlag.Clone(), - SkipDBUpdate: flag.SkipDBUpdateFlag.Clone(), - Light: flag.LightFlag.Clone(), + DownloadDBOnly: flag.DownloadDBOnlyFlag.Clone(), + SkipDBUpdate: flag.SkipDBUpdateFlag.Clone(), + Light: flag.LightFlag.Clone(), + DBRepository: flag.DBRepositoryFlag.Clone(), + JavaDBRepository: flag.JavaDBRepositoryFlag.Clone(), } got, err := f.ToOptions() tt.assertion(t, err) - assert.Equalf(t, tt.want, got, "ToOptions()") + assert.EqualExportedValues(t, tt.want, got) // Assert log messages var gotMessages []string diff --git a/pkg/iac/adapters/cloudformation/aws/accessanalyzer/accessanalyzer_test.go b/pkg/iac/adapters/cloudformation/aws/accessanalyzer/accessanalyzer_test.go new file mode 100644 index 000000000000..04e67c2b6818 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/accessanalyzer/accessanalyzer_test.go @@ -0,0 +1,54 @@ +package accessanalyzer + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/accessanalyzer" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected accessanalyzer.AccessAnalyzer + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + Analyzer: + Type: 'AWS::AccessAnalyzer::Analyzer' + Properties: + AnalyzerName: MyAccountAnalyzer +`, + expected: accessanalyzer.AccessAnalyzer{ + Analyzers: []accessanalyzer.Analyzer{ + { + Name: types.StringTest("MyAccountAnalyzer"), + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + Analyzer: + Type: 'AWS::AccessAnalyzer::Analyzer' +`, + expected: accessanalyzer.AccessAnalyzer{ + Analyzers: []accessanalyzer.Analyzer{ + {}, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/apigateway/apigateway_test.go b/pkg/iac/adapters/cloudformation/aws/apigateway/apigateway_test.go new file mode 100644 index 000000000000..8f9e55ef8abd --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/apigateway/apigateway_test.go @@ -0,0 +1,84 @@ +package apigateway + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/apigateway" + v2 
"github.com/aquasecurity/trivy/pkg/iac/providers/aws/apigateway/v2" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected apigateway.APIGateway + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + MyApi: + Type: 'AWS::ApiGatewayV2::Api' + Properties: + Name: MyApi + ProtocolType: WEBSOCKET + MyStage: + Type: 'AWS::ApiGatewayV2::Stage' + Properties: + StageName: Prod + ApiId: !Ref MyApi + AccessLogSettings: + DestinationArn: some-arn +`, + expected: apigateway.APIGateway{ + V2: v2.APIGateway{ + APIs: []v2.API{ + { + Name: types.StringTest("MyApi"), + ProtocolType: types.StringTest("WEBSOCKET"), + Stages: []v2.Stage{ + { + Name: types.StringTest("Prod"), + AccessLogging: v2.AccessLogging{ + CloudwatchLogGroupARN: types.StringTest("some-arn"), + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + MyApi: + Type: 'AWS::ApiGatewayV2::Api' + MyStage: + Type: 'AWS::ApiGatewayV2::Stage' + MyStage2: + Type: 'AWS::ApiGatewayV2::Stage' + Properties: + ApiId: !Ref MyApi +`, + expected: apigateway.APIGateway{ + V2: v2.APIGateway{ + APIs: []v2.API{ + { + Stages: []v2.Stage{{}}, + }, + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/apigateway/stage.go b/pkg/iac/adapters/cloudformation/aws/apigateway/stage.go index c79f89fda5ea..8e9497a91ec3 100644 --- a/pkg/iac/adapters/cloudformation/aws/apigateway/stage.go +++ b/pkg/iac/adapters/cloudformation/aws/apigateway/stage.go @@ -2,18 +2,18 @@ package apigateway import ( v2 "github.com/aquasecurity/trivy/pkg/iac/providers/aws/apigateway/v2" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getApis(cfFile parser2.FileContext) (apis []v2.API) { +func getApis(cfFile parser.FileContext) (apis []v2.API) { apiResources := cfFile.GetResourcesByType("AWS::ApiGatewayV2::Api") for _, apiRes := range apiResources { api := v2.API{ Metadata: apiRes.Metadata(), - Name: types.StringDefault("", apiRes.Metadata()), - ProtocolType: types.StringDefault("", apiRes.Metadata()), + Name: apiRes.GetStringProperty("Name"), + ProtocolType: apiRes.GetStringProperty("ProtocolType"), Stages: getStages(apiRes.ID(), cfFile), } apis = append(apis, api) @@ -22,7 +22,7 @@ func getApis(cfFile parser2.FileContext) (apis []v2.API) { return apis } -func getStages(apiId string, cfFile parser2.FileContext) []v2.Stage { +func getStages(apiId string, cfFile parser.FileContext) []v2.Stage { var apiStages []v2.Stage stageResources := cfFile.GetResourcesByType("AWS::ApiGatewayV2::Stage") @@ -43,7 +43,7 @@ func getStages(apiId string, cfFile parser2.FileContext) []v2.Stage { return apiStages } -func getAccessLogging(r *parser2.Resource) v2.AccessLogging { +func getAccessLogging(r *parser.Resource) v2.AccessLogging { loggingProp := r.GetProperty("AccessLogSettings") if loggingProp.IsNil() { diff --git a/pkg/iac/adapters/cloudformation/aws/athena/athena_test.go b/pkg/iac/adapters/cloudformation/aws/athena/athena_test.go new file mode 100644 index 000000000000..097de6fa303d --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/athena/athena_test.go @@ -0,0 +1,61 @@ +package athena + +import ( 
+ "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/athena" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected athena.Athena + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + MyAthenaWorkGroup: + Type: AWS::Athena::WorkGroup + Properties: + Name: MyCustomWorkGroup + WorkGroupConfiguration: + EnforceWorkGroupConfiguration: true + ResultConfiguration: + EncryptionOption: SSE_KMS +`, + expected: athena.Athena{ + Workgroups: []athena.Workgroup{ + { + Name: types.StringTest("MyCustomWorkGroup"), + EnforceConfiguration: types.BoolTest(true), + Encryption: athena.EncryptionConfiguration{ + Type: types.StringTest("SSE_KMS"), + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + MyAthenaWorkGroup: + Type: AWS::Athena::WorkGroup +`, + expected: athena.Athena{ + Workgroups: []athena.Workgroup{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } + +} diff --git a/pkg/iac/adapters/cloudformation/aws/cloudfront/cloudfront_test.go b/pkg/iac/adapters/cloudformation/aws/cloudfront/cloudfront_test.go new file mode 100644 index 000000000000..6c0ec7348b33 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/cloudfront/cloudfront_test.go @@ -0,0 +1,68 @@ +package cloudfront + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/cloudfront" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected cloudfront.Cloudfront + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + cloudfrontdistribution: + Type: AWS::CloudFront::Distribution + Properties: + DistributionConfig: + WebACLId: "a1b2c3d4-5678-90ab-cdef-EXAMPLE11111" + Logging: + Bucket: "myawslogbucket.s3.amazonaws.com" + ViewerCertificate: + MinimumProtocolVersion: SSLv3 + DefaultCacheBehavior: + ViewerProtocolPolicy: "redirect-to-https" +`, + expected: cloudfront.Cloudfront{ + Distributions: []cloudfront.Distribution{ + { + WAFID: types.StringTest("a1b2c3d4-5678-90ab-cdef-EXAMPLE11111"), + Logging: cloudfront.Logging{ + Bucket: types.StringTest("myawslogbucket.s3.amazonaws.com"), + }, + ViewerCertificate: cloudfront.ViewerCertificate{ + MinimumProtocolVersion: types.StringTest("SSLv3"), + }, + DefaultCacheBehaviour: cloudfront.CacheBehaviour{ + ViewerProtocolPolicy: types.StringTest("redirect-to-https"), + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + cloudfrontdistribution: + Type: AWS::CloudFront::Distribution +`, + expected: cloudfront.Cloudfront{ + Distributions: []cloudfront.Distribution{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/cloudfront/distribution.go b/pkg/iac/adapters/cloudformation/aws/cloudfront/distribution.go index 0364dc82d052..70c5052bcd55 100644 --- a/pkg/iac/adapters/cloudformation/aws/cloudfront/distribution.go +++ b/pkg/iac/adapters/cloudformation/aws/cloudfront/distribution.go @@ -2,11 +2,10 @@ package 
cloudfront import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/cloudfront" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" - "github.com/aquasecurity/trivy/pkg/iac/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -func getDistributions(ctx parser2.FileContext) (distributions []cloudfront.Distribution) { +func getDistributions(ctx parser.FileContext) (distributions []cloudfront.Distribution) { distributionResources := ctx.GetResourcesByType("AWS::CloudFront::Distribution") @@ -32,24 +31,15 @@ func getDistributions(ctx parser2.FileContext) (distributions []cloudfront.Distr return distributions } -func getDefaultCacheBehaviour(r *parser2.Resource) cloudfront.CacheBehaviour { +func getDefaultCacheBehaviour(r *parser.Resource) cloudfront.CacheBehaviour { defaultCache := r.GetProperty("DistributionConfig.DefaultCacheBehavior") if defaultCache.IsNil() { return cloudfront.CacheBehaviour{ - Metadata: r.Metadata(), - ViewerProtocolPolicy: types.StringDefault("allow-all", r.Metadata()), - } - } - protoProp := r.GetProperty("DistributionConfig.DefaultCacheBehavior.ViewerProtocolPolicy") - if protoProp.IsNotString() { - return cloudfront.CacheBehaviour{ - Metadata: r.Metadata(), - ViewerProtocolPolicy: types.StringDefault("allow-all", r.Metadata()), + Metadata: r.Metadata(), } } - return cloudfront.CacheBehaviour{ - Metadata: r.Metadata(), - ViewerProtocolPolicy: protoProp.AsStringValue(), + Metadata: defaultCache.Metadata(), + ViewerProtocolPolicy: defaultCache.GetStringProperty("ViewerProtocolPolicy"), } } diff --git a/pkg/iac/adapters/cloudformation/aws/cloudtrail/cloudtrail_test.go b/pkg/iac/adapters/cloudformation/aws/cloudtrail/cloudtrail_test.go new file mode 100644 index 000000000000..5dcebb291035 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/cloudtrail/cloudtrail_test.go @@ -0,0 +1,64 @@ +package cloudtrail + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/cloudtrail" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected cloudtrail.CloudTrail + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + Trail: + Type: AWS::CloudTrail::Trail + Properties: + S3BucketName: MyBucket + IsLogging: true + TrailName: MyTrail + EnableLogFileValidation: true + IsMultiRegionTrail: true + CloudWatchLogsLogGroupArn: cw-arn + KmsKeyId: my-kms-key +`, + expected: cloudtrail.CloudTrail{ + Trails: []cloudtrail.Trail{ + { + Name: types.StringTest("MyTrail"), + BucketName: types.StringTest("MyBucket"), + IsLogging: types.BoolTest(true), + IsMultiRegion: types.BoolTest(true), + EnableLogFileValidation: types.BoolTest(true), + CloudWatchLogsLogGroupArn: types.StringTest("cw-arn"), + KMSKeyID: types.StringTest("my-kms-key"), + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + Trail: + Type: AWS::CloudTrail::Trail + `, + expected: cloudtrail.CloudTrail{ + Trails: []cloudtrail.Trail{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/cloudwatch/cloudwatch.go b/pkg/iac/adapters/cloudformation/aws/cloudwatch/cloudwatch.go index 1c6efa85a891..0c4a59e43189 100644 --- 
a/pkg/iac/adapters/cloudformation/aws/cloudwatch/cloudwatch.go +++ b/pkg/iac/adapters/cloudformation/aws/cloudwatch/cloudwatch.go @@ -9,6 +9,5 @@ import ( func Adapt(cfFile parser.FileContext) cloudwatch.CloudWatch { return cloudwatch.CloudWatch{ LogGroups: getLogGroups(cfFile), - Alarms: nil, } } diff --git a/pkg/iac/adapters/cloudformation/aws/cloudwatch/cloudwatch_test.go b/pkg/iac/adapters/cloudformation/aws/cloudwatch/cloudwatch_test.go new file mode 100644 index 000000000000..c8a7bd95c9a3 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/cloudwatch/cloudwatch_test.go @@ -0,0 +1,57 @@ +package cloudwatch + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/cloudwatch" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected cloudwatch.CloudWatch + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + myLogGroup: + Type: AWS::Logs::LogGroup + Properties: + LogGroupName: my-log-group + RetentionInDays: 7 + KmsKeyId: my-kms + +`, + expected: cloudwatch.CloudWatch{ + LogGroups: []cloudwatch.LogGroup{ + { + Name: types.StringTest("my-log-group"), + RetentionInDays: types.IntTest(7), + KMSKeyID: types.StringTest("my-kms"), + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + myLogGroup: + Type: AWS::Logs::LogGroup + `, + expected: cloudwatch.CloudWatch{ + LogGroups: []cloudwatch.LogGroup{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/cloudwatch/log_group.go b/pkg/iac/adapters/cloudformation/aws/cloudwatch/log_group.go index 81730f050ecf..09e039129781 100644 --- a/pkg/iac/adapters/cloudformation/aws/cloudwatch/log_group.go +++ b/pkg/iac/adapters/cloudformation/aws/cloudwatch/log_group.go @@ -3,7 +3,6 @@ package cloudwatch import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/cloudwatch" "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" - "github.com/aquasecurity/trivy/pkg/iac/types" ) func getLogGroups(ctx parser.FileContext) (logGroups []cloudwatch.LogGroup) { @@ -13,11 +12,9 @@ func getLogGroups(ctx parser.FileContext) (logGroups []cloudwatch.LogGroup) { for _, r := range logGroupResources { group := cloudwatch.LogGroup{ Metadata: r.Metadata(), - Arn: types.StringDefault("", r.Metadata()), Name: r.GetStringProperty("LogGroupName"), KMSKeyID: r.GetStringProperty("KmsKeyId"), - RetentionInDays: r.GetIntProperty("RetentionInDays", 0), - MetricFilters: nil, + RetentionInDays: r.GetIntProperty("RetentionInDays"), } logGroups = append(logGroups, group) } diff --git a/pkg/iac/adapters/cloudformation/aws/codebuild/codebuild_test.go b/pkg/iac/adapters/cloudformation/aws/codebuild/codebuild_test.go new file mode 100644 index 000000000000..06eaa19402e6 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/codebuild/codebuild_test.go @@ -0,0 +1,68 @@ +package codebuild + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/codebuild" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected codebuild.CodeBuild + }{ + { + name: "complete", + 
source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + Project: + Type: AWS::CodeBuild::Project + Properties: + Artifacts: + EncryptionDisabled: true + SecondaryArtifacts: + - EncryptionDisabled: true +`, + expected: codebuild.CodeBuild{ + Projects: []codebuild.Project{ + { + ArtifactSettings: codebuild.ArtifactSettings{ + EncryptionEnabled: types.BoolTest(false), + }, + SecondaryArtifactSettings: []codebuild.ArtifactSettings{ + { + EncryptionEnabled: types.BoolTest(false), + }, + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + Project: + Type: AWS::CodeBuild::Project + `, + expected: codebuild.CodeBuild{ + Projects: []codebuild.Project{ + { + ArtifactSettings: codebuild.ArtifactSettings{ + EncryptionEnabled: types.BoolTest(true), + }, + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/codebuild/project.go b/pkg/iac/adapters/cloudformation/aws/codebuild/project.go index 9c0541831223..554fc8afecea 100644 --- a/pkg/iac/adapters/cloudformation/aws/codebuild/project.go +++ b/pkg/iac/adapters/cloudformation/aws/codebuild/project.go @@ -2,11 +2,11 @@ package codebuild import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/codebuild" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getProjects(ctx parser2.FileContext) (projects []codebuild.Project) { +func getProjects(ctx parser.FileContext) (projects []codebuild.Project) { projectResources := ctx.GetResourcesByType("AWS::CodeBuild::Project") @@ -23,7 +23,7 @@ func getProjects(ctx parser2.FileContext) (projects []codebuild.Project) { return projects } -func getSecondaryArtifactSettings(r *parser2.Resource) (secondaryArtifacts []codebuild.ArtifactSettings) { +func getSecondaryArtifactSettings(r *parser.Resource) (secondaryArtifacts []codebuild.ArtifactSettings) { secondaryArtifactsList := r.GetProperty("SecondaryArtifacts") if secondaryArtifactsList.IsNil() || !secondaryArtifactsList.IsList() { return @@ -44,7 +44,7 @@ func getSecondaryArtifactSettings(r *parser2.Resource) (secondaryArtifacts []cod return secondaryArtifacts } -func getArtifactSettings(r *parser2.Resource) codebuild.ArtifactSettings { +func getArtifactSettings(r *parser.Resource) codebuild.ArtifactSettings { settings := codebuild.ArtifactSettings{ Metadata: r.Metadata(), diff --git a/pkg/iac/adapters/cloudformation/aws/config/adapt_test.go b/pkg/iac/adapters/cloudformation/aws/config/adapt_test.go index 1a8f30e018f6..e6dc652da7b1 100644 --- a/pkg/iac/adapters/cloudformation/aws/config/adapt_test.go +++ b/pkg/iac/adapters/cloudformation/aws/config/adapt_test.go @@ -1,14 +1,11 @@ package config import ( - "context" "testing" - "github.com/aquasecurity/trivy/internal/testutil" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" "github.com/aquasecurity/trivy/pkg/iac/providers/aws/config" - "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" - "github.com/stretchr/testify/require" ) func TestAdapt(t *testing.T) { @@ -29,8 +26,7 @@ Resources: `, expected: config.Config{ ConfigurationAggregrator: config.ConfigurationAggregrator{ - Metadata: types.NewTestMetadata(), - SourceAllRegions: types.Bool(true, 
types.NewTestMetadata()), + SourceAllRegions: types.BoolTest(true), }, }, }, @@ -46,8 +42,7 @@ Resources: `, expected: config.Config{ ConfigurationAggregrator: config.ConfigurationAggregrator{ - Metadata: types.NewTestMetadata(), - SourceAllRegions: types.Bool(true, types.NewTestMetadata()), + SourceAllRegions: types.BoolTest(true), }, }, }, @@ -55,15 +50,7 @@ Resources: for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - fs := testutil.CreateFS(t, map[string]string{ - "template.yaml": tt.source, - }) - - p := parser.New() - fctx, err := p.ParseFile(context.TODO(), fs, "template.yaml") - require.NoError(t, err) - - testutil.AssertDefsecEqual(t, tt.expected, Adapt(*fctx)) + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) }) } diff --git a/pkg/iac/adapters/cloudformation/aws/config/aggregator.go b/pkg/iac/adapters/cloudformation/aws/config/aggregator.go index 1f34c21591b0..72447398b80f 100644 --- a/pkg/iac/adapters/cloudformation/aws/config/aggregator.go +++ b/pkg/iac/adapters/cloudformation/aws/config/aggregator.go @@ -2,11 +2,11 @@ package config import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/config" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getConfigurationAggregator(ctx parser2.FileContext) config.ConfigurationAggregrator { +func getConfigurationAggregator(ctx parser.FileContext) config.ConfigurationAggregrator { aggregator := config.ConfigurationAggregrator{ Metadata: iacTypes.NewUnmanagedMetadata(), @@ -25,7 +25,7 @@ func getConfigurationAggregator(ctx parser2.FileContext) config.ConfigurationAgg } } -func isSourcingAllRegions(r *parser2.Resource) iacTypes.BoolValue { +func isSourcingAllRegions(r *parser.Resource) iacTypes.BoolValue { accountProp := r.GetProperty("AccountAggregationSources") if accountProp.IsNotNil() && accountProp.IsList() { diff --git a/pkg/iac/adapters/cloudformation/aws/documentdb/cluster.go b/pkg/iac/adapters/cloudformation/aws/documentdb/cluster.go index 568fcfb44f72..f37467dc4100 100644 --- a/pkg/iac/adapters/cloudformation/aws/documentdb/cluster.go +++ b/pkg/iac/adapters/cloudformation/aws/documentdb/cluster.go @@ -2,11 +2,11 @@ package documentdb import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/documentdb" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getClusters(ctx parser2.FileContext) (clusters []documentdb.Cluster) { +func getClusters(ctx parser.FileContext) (clusters []documentdb.Cluster) { clusterResources := ctx.GetResourcesByType("AWS::DocDB::DBCluster") @@ -28,13 +28,13 @@ func getClusters(ctx parser2.FileContext) (clusters []documentdb.Cluster) { return clusters } -func updateInstancesOnCluster(cluster *documentdb.Cluster, ctx parser2.FileContext) { +func updateInstancesOnCluster(cluster *documentdb.Cluster, ctx parser.FileContext) { instanceResources := ctx.GetResourcesByType("AWS::DocDB::DBInstance") for _, r := range instanceResources { clusterIdentifier := r.GetStringProperty("DBClusterIdentifier") - if clusterIdentifier == cluster.Identifier { + if cluster.Identifier.EqualTo(clusterIdentifier.Value()) { cluster.Instances = append(cluster.Instances, documentdb.Instance{ Metadata: r.Metadata(), KMSKeyID: cluster.KMSKeyID, @@ -43,7 +43,7 @@ func 
updateInstancesOnCluster(cluster *documentdb.Cluster, ctx parser2.FileConte } } -func getLogExports(r *parser2.Resource) (logExports []types.StringValue) { +func getLogExports(r *parser.Resource) (logExports []types.StringValue) { exportsList := r.GetProperty("EnableCloudwatchLogsExports") diff --git a/pkg/iac/adapters/cloudformation/aws/documentdb/documentdb_test.go b/pkg/iac/adapters/cloudformation/aws/documentdb/documentdb_test.go new file mode 100644 index 000000000000..3e60155e9dfb --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/documentdb/documentdb_test.go @@ -0,0 +1,79 @@ +package documentdb + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/documentdb" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected documentdb.DocumentDB + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + myDBCluster: + Type: 'AWS::DocDB::DBCluster' + Properties: + BackupRetentionPeriod: 8 + DBClusterIdentifier: sample-cluster + KmsKeyId: your-kms-key-id + StorageEncrypted: true + EnableCloudwatchLogsExports: + - audit + - general + myDBInstance: + Type: 'AWS::DocDB::DBInstance' + Properties: + DBClusterIdentifier: sample-cluster + KmsKeyId: your-kms-key-id +`, + expected: documentdb.DocumentDB{ + Clusters: []documentdb.Cluster{ + { + Identifier: types.StringTest("sample-cluster"), + BackupRetentionPeriod: types.IntTest(8), + KMSKeyID: types.StringTest("your-kms-key-id"), + StorageEncrypted: types.BoolTest(true), + EnabledLogExports: []types.StringValue{ + types.StringTest("audit"), + types.StringTest("general"), + }, + Instances: []documentdb.Instance{ + { + KMSKeyID: types.StringTest("your-kms-key-id"), + }, + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + myDBCluster: + Type: 'AWS::DocDB::DBCluster' + `, + expected: documentdb.DocumentDB{ + Clusters: []documentdb.Cluster{ + { + BackupRetentionPeriod: types.IntTest(1), + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/dynamodb/dynamodb_test.go b/pkg/iac/adapters/cloudformation/aws/dynamodb/dynamodb_test.go new file mode 100644 index 000000000000..ce62e85cde5e --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/dynamodb/dynamodb_test.go @@ -0,0 +1,55 @@ +package dynamodb + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/dynamodb" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected dynamodb.DynamoDB + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + daxCluster: + Type: AWS::DAX::Cluster + Properties: + SSESpecification: + SSEEnabled: true +`, + expected: dynamodb.DynamoDB{ + DAXClusters: []dynamodb.DAXCluster{ + { + ServerSideEncryption: dynamodb.ServerSideEncryption{ + Enabled: types.BoolTest(true), + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + daxCluster: + Type: AWS::DAX::Cluster + `, + expected: dynamodb.DynamoDB{ + DAXClusters: []dynamodb.DAXCluster{{}}, + }, + }, + } + + for _, tt := 
range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/ec2/adapt_test.go b/pkg/iac/adapters/cloudformation/aws/ec2/adapt_test.go index 7e7ece3df765..ac05f8f7b263 100644 --- a/pkg/iac/adapters/cloudformation/aws/ec2/adapt_test.go +++ b/pkg/iac/adapters/cloudformation/aws/ec2/adapt_test.go @@ -1,14 +1,11 @@ package ec2 import ( - "context" "testing" - "github.com/aquasecurity/trivy/internal/testutil" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" "github.com/aquasecurity/trivy/pkg/iac/providers/aws/ec2" - "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" - "github.com/stretchr/testify/require" ) func TestAdapt(t *testing.T) { @@ -18,7 +15,7 @@ func TestAdapt(t *testing.T) { expected ec2.EC2 }{ { - name: "ec2 instance", + name: "complete", source: `AWSTemplateFormatVersion: 2010-09-09 Resources: MyEC2Instance: @@ -36,27 +33,155 @@ Resources: Encrypted: true - DeviceName: "/dev/sdk" NoDevice: {} + NewVolume: + Type: AWS::EC2::Volume + Properties: + KmsKeyId: alias/my_cmk + Encrypted: true + mySubnet: + Type: AWS::EC2::Subnet + Properties: + MapPublicIpOnLaunch: true + InstanceSecurityGroup: + Type: AWS::EC2::SecurityGroup + Properties: + GroupName: default + GroupDescription: Allow http to client host + VpcId: vpc-id + SecurityGroupIngress: + - IpProtocol: tcp + Description: ingress + FromPort: 80 + ToPort: 80 + CidrIp: 0.0.0.0/0 + SecurityGroupEgress: + - IpProtocol: tcp + Description: egress + FromPort: 80 + ToPort: 80 + CidrIp: 0.0.0.0/0 + myNetworkAcl: + Type: AWS::EC2::NetworkAcl + Properties: + VpcId: vpc-1122334455aabbccd + InboundRule: + Type: AWS::EC2::NetworkAclEntry + Properties: + NetworkAclId: + Ref: myNetworkAcl + Egress: true + Protocol: 6 + RuleAction: allow + CidrBlock: 172.16.0.0/24 + myLaunchConfig: + Type: AWS::AutoScaling::LaunchConfiguration + Properties: + LaunchConfigurationName: test-cfg + InstanceId: !Ref MyEC2Instance + AssociatePublicIpAddress: true + SecurityGroups: + - !Ref InstanceSecurityGroup + UserData: test + BlockDeviceMappings: + - DeviceName: /dev/sda1 + Ebs: + VolumeSize: '30' + VolumeType: gp3 + Encrypted: true + - DeviceName: /dev/sdm + Ebs: + VolumeSize: '100' + DeleteOnTermination: false + MetadataOptions: + HttpTokens: required + HttpEndpoint: disabled `, expected: ec2.EC2{ Instances: []ec2.Instance{ { - Metadata: types.NewTestMetadata(), MetadataOptions: ec2.MetadataOptions{ HttpEndpoint: types.StringDefault("enabled", types.NewTestMetadata()), HttpTokens: types.StringDefault("optional", types.NewTestMetadata()), }, RootBlockDevice: &ec2.BlockDevice{ - Metadata: types.NewTestMetadata(), Encrypted: types.BoolDefault(true, types.NewTestMetadata()), }, EBSBlockDevices: []*ec2.BlockDevice{ { - Metadata: types.NewTestMetadata(), Encrypted: types.BoolDefault(false, types.NewTestMetadata()), }, }, }, }, + Volumes: []ec2.Volume{ + { + Encryption: ec2.Encryption{ + KMSKeyID: types.StringTest("alias/my_cmk"), + Enabled: types.BoolTest(true), + }, + }, + }, + Subnets: []ec2.Subnet{ + { + MapPublicIpOnLaunch: types.BoolTest(true), + }, + }, + SecurityGroups: []ec2.SecurityGroup{ + { + IsDefault: types.BoolTest(true), + Description: types.StringTest("Allow http to client host"), + VPCID: types.StringTest("vpc-id"), + IngressRules: []ec2.SecurityGroupRule{ + { + Description: types.StringTest("ingress"), + CIDRs: []types.StringValue{ + 
types.StringTest("0.0.0.0/0"), + }, + }, + }, + EgressRules: []ec2.SecurityGroupRule{ + { + Description: types.StringTest("egress"), + CIDRs: []types.StringValue{ + types.StringTest("0.0.0.0/0"), + }, + }, + }, + }, + }, + NetworkACLs: []ec2.NetworkACL{ + { + Rules: []ec2.NetworkACLRule{ + { + Type: types.StringTest(ec2.TypeEgress), + Action: types.StringTest(ec2.ActionAllow), + Protocol: types.StringTest("6"), + CIDRs: []types.StringValue{ + types.StringTest("172.16.0.0/24"), + }, + }, + }, + }, + }, + LaunchConfigurations: []ec2.LaunchConfiguration{ + { + Name: types.StringTest("test-cfg"), + AssociatePublicIP: types.BoolTest(true), + RootBlockDevice: &ec2.BlockDevice{ + Encrypted: types.BoolTest(true), + }, + EBSBlockDevices: []*ec2.BlockDevice{ + { + Encrypted: types.BoolTest(false), + }, + }, + UserData: types.StringTest("test"), + MetadataOptions: ec2.MetadataOptions{ + HttpTokens: types.StringTest("required"), + HttpEndpoint: types.StringTest("disabled"), + }, + }, + }, }, }, { @@ -81,27 +206,23 @@ Resources: expected: ec2.EC2{ LaunchTemplates: []ec2.LaunchTemplate{ { - Metadata: types.NewTestMetadata(), - Name: types.String("MyTemplate", types.NewTestMetadata()), + Name: types.StringTest("MyTemplate"), Instance: ec2.Instance{ - Metadata: types.NewTestMetadata(), MetadataOptions: ec2.MetadataOptions{ - HttpEndpoint: types.String("enabled", types.NewTestMetadata()), - HttpTokens: types.String("required", types.NewTestMetadata()), + HttpEndpoint: types.StringTest("enabled"), + HttpTokens: types.StringTest("required"), }, }, }, }, Instances: []ec2.Instance{ { - Metadata: types.NewTestMetadata(), MetadataOptions: ec2.MetadataOptions{ - HttpEndpoint: types.String("enabled", types.NewTestMetadata()), - HttpTokens: types.String("required", types.NewTestMetadata()), + HttpEndpoint: types.StringTest("enabled"), + HttpTokens: types.StringTest("required"), }, RootBlockDevice: &ec2.BlockDevice{ - Metadata: types.NewTestMetadata(), - Encrypted: types.Bool(false, types.NewTestMetadata()), + Encrypted: types.BoolTest(false), }, }, }, @@ -129,27 +250,23 @@ Resources: expected: ec2.EC2{ LaunchTemplates: []ec2.LaunchTemplate{ { - Metadata: types.NewTestMetadata(), - Name: types.String("MyTemplate", types.NewTestMetadata()), + Name: types.StringTest("MyTemplate"), Instance: ec2.Instance{ - Metadata: types.NewTestMetadata(), MetadataOptions: ec2.MetadataOptions{ - HttpEndpoint: types.String("enabled", types.NewTestMetadata()), - HttpTokens: types.String("required", types.NewTestMetadata()), + HttpEndpoint: types.StringTest("enabled"), + HttpTokens: types.StringTest("required"), }, }, }, }, Instances: []ec2.Instance{ { - Metadata: types.NewTestMetadata(), MetadataOptions: ec2.MetadataOptions{ - HttpEndpoint: types.String("enabled", types.NewTestMetadata()), - HttpTokens: types.String("required", types.NewTestMetadata()), + HttpEndpoint: types.StringTest("enabled"), + HttpTokens: types.StringTest("required"), }, RootBlockDevice: &ec2.BlockDevice{ - Metadata: types.NewTestMetadata(), - Encrypted: types.Bool(false, types.NewTestMetadata()), + Encrypted: types.BoolTest(false), }, }, }, @@ -159,16 +276,7 @@ Resources: for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - - fsys := testutil.CreateFS(t, map[string]string{ - "main.yaml": tt.source, - }) - - fctx, err := parser.New().ParseFile(context.TODO(), fsys, "main.yaml") - require.NoError(t, err) - - adapted := Adapt(*fctx) - testutil.AssertDefsecEqual(t, tt.expected, adapted) + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) }) } 
diff --git a/pkg/iac/adapters/cloudformation/aws/ec2/instance.go b/pkg/iac/adapters/cloudformation/aws/ec2/instance.go index 8a7952f9b809..7b6f149e0168 100644 --- a/pkg/iac/adapters/cloudformation/aws/ec2/instance.go +++ b/pkg/iac/adapters/cloudformation/aws/ec2/instance.go @@ -2,11 +2,11 @@ package ec2 import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/ec2" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getInstances(ctx parser2.FileContext) (instances []ec2.Instance) { +func getInstances(ctx parser.FileContext) (instances []ec2.Instance) { instanceResources := ctx.GetResourcesByType("AWS::EC2::Instance") for _, r := range instanceResources { @@ -48,7 +48,7 @@ func getInstances(ctx parser2.FileContext) (instances []ec2.Instance) { return instances } -func findRelatedLaunchTemplate(fctx parser2.FileContext, r *parser2.Resource) (ec2.LaunchTemplate, bool) { +func findRelatedLaunchTemplate(fctx parser.FileContext, r *parser.Resource) (ec2.LaunchTemplate, bool) { launchTemplateRef := r.GetProperty("LaunchTemplate.LaunchTemplateName") if launchTemplateRef.IsString() { res := findLaunchTemplateByName(fctx, launchTemplateRef) @@ -69,7 +69,7 @@ func findRelatedLaunchTemplate(fctx parser2.FileContext, r *parser2.Resource) (e return adaptLaunchTemplate(resource), true } -func findLaunchTemplateByName(fctx parser2.FileContext, prop *parser2.Property) *parser2.Resource { +func findLaunchTemplateByName(fctx parser.FileContext, prop *parser.Property) *parser.Resource { for _, res := range fctx.GetResourcesByType("AWS::EC2::LaunchTemplate") { templateName := res.GetProperty("LaunchTemplateName") if templateName.IsNotString() { @@ -84,7 +84,7 @@ func findLaunchTemplateByName(fctx parser2.FileContext, prop *parser2.Property) return nil } -func getBlockDevices(r *parser2.Resource) []*ec2.BlockDevice { +func getBlockDevices(r *parser.Resource) []*ec2.BlockDevice { var blockDevices []*ec2.BlockDevice devicesProp := r.GetProperty("BlockDeviceMappings") diff --git a/pkg/iac/adapters/cloudformation/aws/ec2/launch_configuration.go b/pkg/iac/adapters/cloudformation/aws/ec2/launch_configuration.go index 9dcd80f5d47f..e99459b5d4f0 100644 --- a/pkg/iac/adapters/cloudformation/aws/ec2/launch_configuration.go +++ b/pkg/iac/adapters/cloudformation/aws/ec2/launch_configuration.go @@ -13,14 +13,14 @@ func getLaunchConfigurations(file parser.FileContext) (launchConfigurations []ec launchConfig := ec2.LaunchConfiguration{ Metadata: r.Metadata(), - Name: r.GetStringProperty("Name"), + Name: r.GetStringProperty("LaunchConfigurationName"), AssociatePublicIP: r.GetBoolProperty("AssociatePublicIpAddress"), MetadataOptions: ec2.MetadataOptions{ Metadata: r.Metadata(), HttpTokens: types.StringDefault("optional", r.Metadata()), HttpEndpoint: types.StringDefault("enabled", r.Metadata()), }, - UserData: r.GetStringProperty("UserData", ""), + UserData: r.GetStringProperty("UserData"), } if opts := r.GetProperty("MetadataOptions"); opts.IsNotNil() { diff --git a/pkg/iac/adapters/cloudformation/aws/ec2/launch_template.go b/pkg/iac/adapters/cloudformation/aws/ec2/launch_template.go index e22ac9abed3d..c138ed3284e1 100644 --- a/pkg/iac/adapters/cloudformation/aws/ec2/launch_template.go +++ b/pkg/iac/adapters/cloudformation/aws/ec2/launch_template.go @@ -2,11 +2,11 @@ package ec2 import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/ec2" - parser2 
"github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getLaunchTemplates(file parser2.FileContext) (templates []ec2.LaunchTemplate) { +func getLaunchTemplates(file parser.FileContext) (templates []ec2.LaunchTemplate) { launchConfigResources := file.GetResourcesByType("AWS::EC2::LaunchTemplate") for _, r := range launchConfigResources { @@ -15,7 +15,7 @@ func getLaunchTemplates(file parser2.FileContext) (templates []ec2.LaunchTemplat return templates } -func adaptLaunchTemplate(r *parser2.Resource) ec2.LaunchTemplate { +func adaptLaunchTemplate(r *parser.Resource) ec2.LaunchTemplate { launchTemplate := ec2.LaunchTemplate{ Metadata: r.Metadata(), Name: r.GetStringProperty("LaunchTemplateName", ""), diff --git a/pkg/iac/adapters/cloudformation/aws/ec2/security_group.go b/pkg/iac/adapters/cloudformation/aws/ec2/security_group.go index 687fd12d4366..72546aa116e0 100644 --- a/pkg/iac/adapters/cloudformation/aws/ec2/security_group.go +++ b/pkg/iac/adapters/cloudformation/aws/ec2/security_group.go @@ -2,11 +2,11 @@ package ec2 import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/ec2" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getSecurityGroups(ctx parser2.FileContext) (groups []ec2.SecurityGroup) { +func getSecurityGroups(ctx parser.FileContext) (groups []ec2.SecurityGroup) { for _, r := range ctx.GetResourcesByType("AWS::EC2::SecurityGroup") { group := ec2.SecurityGroup{ Metadata: r.Metadata(), @@ -22,7 +22,7 @@ func getSecurityGroups(ctx parser2.FileContext) (groups []ec2.SecurityGroup) { return groups } -func getIngressRules(r *parser2.Resource) (sgRules []ec2.SecurityGroupRule) { +func getIngressRules(r *parser.Resource) (sgRules []ec2.SecurityGroupRule) { if ingressProp := r.GetProperty("SecurityGroupIngress"); ingressProp.IsList() { for _, ingress := range ingressProp.AsList() { rule := ec2.SecurityGroupRule{ @@ -45,7 +45,7 @@ func getIngressRules(r *parser2.Resource) (sgRules []ec2.SecurityGroupRule) { return sgRules } -func getEgressRules(r *parser2.Resource) (sgRules []ec2.SecurityGroupRule) { +func getEgressRules(r *parser.Resource) (sgRules []ec2.SecurityGroupRule) { if egressProp := r.GetProperty("SecurityGroupEgress"); egressProp.IsList() { for _, egress := range egressProp.AsList() { rule := ec2.SecurityGroupRule{ diff --git a/pkg/iac/adapters/cloudformation/aws/ecr/ecr_test.go b/pkg/iac/adapters/cloudformation/aws/ecr/ecr_test.go new file mode 100644 index 000000000000..cb3e4b6b4b8d --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/ecr/ecr_test.go @@ -0,0 +1,102 @@ +package ecr + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/ecr" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/iam" + "github.com/aquasecurity/trivy/pkg/iac/types" + "github.com/liamg/iamgo" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected ecr.ECR + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + +`, + expected: ecr.ECR{}, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + MyRepository: + Type: AWS::ECR::Repository + Properties: + RepositoryName: 
"test-repository" + ImageScanningConfiguration: + ScanOnPush: true + EncryptionConfiguration: + EncryptionType: KMS + KmsKey: mykey + ImageTagMutability: IMMUTABLE + RepositoryPolicyText: + Version: "2012-10-17" + Statement: + - + Sid: AllowPushPull + Effect: Allow + Principal: + AWS: + - "arn:aws:iam::123456789012:user/Alice" + Action: + - "ecr:GetDownloadUrlForLayer" + - "ecr:BatchGetImage" + `, + expected: ecr.ECR{ + Repositories: []ecr.Repository{ + { + ImageTagsImmutable: types.BoolTest(true), + ImageScanning: ecr.ImageScanning{ + ScanOnPush: types.BoolTest(true), + }, + Encryption: ecr.Encryption{ + Type: types.StringTest("KMS"), + KMSKeyID: types.StringTest("mykey"), + }, + Policies: []iam.Policy{ + { + Document: func() iam.Document { + return iam.Document{ + Parsed: iamgo.NewPolicyBuilder(). + WithVersion("2012-10-17"). + WithStatement( + iamgo.NewStatementBuilder(). + WithSid("AllowPushPull"). + WithEffect("Allow"). + WithAWSPrincipals( + []string{"arn:aws:iam::123456789012:user/Alice"}, + ). + WithActions( + []string{ + "ecr:GetDownloadUrlForLayer", + "ecr:BatchGetImage", + }, + ). + Build(), + ). + Build(), + } + }(), + }, + }, + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/ecr/repository.go b/pkg/iac/adapters/cloudformation/aws/ecr/repository.go index 886be64037a3..2c08d57a29c6 100644 --- a/pkg/iac/adapters/cloudformation/aws/ecr/repository.go +++ b/pkg/iac/adapters/cloudformation/aws/ecr/repository.go @@ -7,11 +7,11 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/ecr" "github.com/aquasecurity/trivy/pkg/iac/providers/aws/iam" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getRepositories(ctx parser2.FileContext) (repositories []ecr.Repository) { +func getRepositories(ctx parser.FileContext) (repositories []ecr.Repository) { repositoryResources := ctx.GetResourcesByType("AWS::ECR::Repository") @@ -57,7 +57,7 @@ func getRepositories(ctx parser2.FileContext) (repositories []ecr.Repository) { return repositories } -func getPolicy(r *parser2.Resource) (*iam.Policy, error) { +func getPolicy(r *parser.Resource) (*iam.Policy, error) { policyProp := r.GetProperty("RepositoryPolicyText") if policyProp.IsNil() { return nil, fmt.Errorf("missing policy") @@ -79,13 +79,10 @@ func getPolicy(r *parser2.Resource) (*iam.Policy, error) { }, nil } -func hasImmutableImageTags(r *parser2.Resource) iacTypes.BoolValue { +func hasImmutableImageTags(r *parser.Resource) iacTypes.BoolValue { mutabilityProp := r.GetProperty("ImageTagMutability") if mutabilityProp.IsNil() { return iacTypes.BoolDefault(false, r.Metadata()) } - if !mutabilityProp.EqualTo("IMMUTABLE") { - return iacTypes.Bool(false, mutabilityProp.Metadata()) - } - return iacTypes.Bool(true, mutabilityProp.Metadata()) + return iacTypes.Bool(mutabilityProp.EqualTo("IMMUTABLE"), mutabilityProp.Metadata()) } diff --git a/pkg/iac/adapters/cloudformation/aws/ecs/cluster.go b/pkg/iac/adapters/cloudformation/aws/ecs/cluster.go index 6359dbc4cc93..e3964076d25e 100644 --- a/pkg/iac/adapters/cloudformation/aws/ecs/cluster.go +++ b/pkg/iac/adapters/cloudformation/aws/ecs/cluster.go @@ -2,11 +2,11 @@ package ecs import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/ecs" - parser2 
"github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getClusters(ctx parser2.FileContext) (clusters []ecs.Cluster) { +func getClusters(ctx parser.FileContext) (clusters []ecs.Cluster) { clusterResources := ctx.GetResourcesByType("AWS::ECS::Cluster") @@ -24,7 +24,7 @@ func getClusters(ctx parser2.FileContext) (clusters []ecs.Cluster) { return clusters } -func getClusterSettings(r *parser2.Resource) ecs.ClusterSettings { +func getClusterSettings(r *parser.Resource) ecs.ClusterSettings { clusterSettings := ecs.ClusterSettings{ Metadata: r.Metadata(), @@ -45,7 +45,7 @@ func getClusterSettings(r *parser2.Resource) ecs.ClusterSettings { return clusterSettings } -func checkProperty(setting *parser2.Property, clusterSettings *ecs.ClusterSettings) { +func checkProperty(setting *parser.Property, clusterSettings *ecs.ClusterSettings) { settingMap := setting.AsMap() name := settingMap["Name"] if name.IsNotNil() && name.EqualTo("containerInsights") { diff --git a/pkg/iac/adapters/cloudformation/aws/ecs/ecs_test.go b/pkg/iac/adapters/cloudformation/aws/ecs/ecs_test.go new file mode 100644 index 000000000000..c6323a1df926 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/ecs/ecs_test.go @@ -0,0 +1,108 @@ +package ecs + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/ecs" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected ecs.ECS + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + ECSCluster: + Type: 'AWS::ECS::Cluster' + Properties: + ClusterName: MyFargateCluster + ClusterSettings: + - Name: containerInsights + Value: enabled + taskdefinition: + Type: AWS::ECS::TaskDefinition + Properties: + ContainerDefinitions: + - + Name: "busybox" + Image: "busybox" + Cpu: 256 + Memory: 512 + Essential: true + Privileged: true + Environment: + - Name: entryPoint + Value: 'sh, -c' + Volumes: + - + Host: + SourcePath: "/var/lib/docker/vfs/dir/" + Name: "my-vol" + EFSVolumeConfiguration: + TransitEncryption: enabled +`, + expected: ecs.ECS{ + Clusters: []ecs.Cluster{ + { + Settings: ecs.ClusterSettings{ + ContainerInsightsEnabled: types.BoolTest(true), + }, + }, + }, + TaskDefinitions: []ecs.TaskDefinition{ + { + Volumes: []ecs.Volume{ + { + EFSVolumeConfiguration: ecs.EFSVolumeConfiguration{ + TransitEncryptionEnabled: types.BoolTest(true), + }, + }, + }, + ContainerDefinitions: []ecs.ContainerDefinition{ + { + Name: types.StringTest("busybox"), + Image: types.StringTest("busybox"), + CPU: types.IntTest(256), + Memory: types.IntTest(512), + Essential: types.BoolTest(true), + Privileged: types.BoolTest(true), + Environment: []ecs.EnvVar{ + { + Name: "entryPoint", + Value: "sh, -c", + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + ECSCluster: + Type: 'AWS::ECS::Cluster' + taskdefinition: + Type: AWS::ECS::TaskDefinition + `, + expected: ecs.ECS{ + Clusters: []ecs.Cluster{{}}, + TaskDefinitions: []ecs.TaskDefinition{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/ecs/task_definition.go 
b/pkg/iac/adapters/cloudformation/aws/ecs/task_definition.go index cdb9ae08ab45..9c2e342bb6f3 100644 --- a/pkg/iac/adapters/cloudformation/aws/ecs/task_definition.go +++ b/pkg/iac/adapters/cloudformation/aws/ecs/task_definition.go @@ -2,11 +2,11 @@ package ecs import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/ecs" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getTaskDefinitions(ctx parser2.FileContext) (taskDefinitions []ecs.TaskDefinition) { +func getTaskDefinitions(ctx parser.FileContext) (taskDefinitions []ecs.TaskDefinition) { taskDefResources := ctx.GetResourcesByType("AWS::ECS::TaskDefinition") @@ -23,7 +23,7 @@ func getTaskDefinitions(ctx parser2.FileContext) (taskDefinitions []ecs.TaskDefi return taskDefinitions } -func getContainerDefinitions(r *parser2.Resource) ([]ecs.ContainerDefinition, error) { +func getContainerDefinitions(r *parser.Resource) ([]ecs.ContainerDefinition, error) { var definitions []ecs.ContainerDefinition containerDefs := r.GetProperty("ContainerDefinitions") if containerDefs.IsNil() || containerDefs.IsNotList() { @@ -36,19 +36,19 @@ func getContainerDefinitions(r *parser2.Resource) ([]ecs.ContainerDefinition, er if envVarsList.IsNotNil() && envVarsList.IsList() { for _, envVar := range envVarsList.AsList() { envVars = append(envVars, ecs.EnvVar{ - Name: envVar.GetStringProperty("Name", "").Value(), - Value: envVar.GetStringProperty("Value", "").Value(), + Name: envVar.GetStringProperty("Name").Value(), + Value: envVar.GetStringProperty("Value").Value(), }) } } definition := ecs.ContainerDefinition{ Metadata: containerDef.Metadata(), - Name: containerDef.GetStringProperty("Name", ""), - Image: containerDef.GetStringProperty("Image", ""), - CPU: containerDef.GetIntProperty("CPU", 1), - Memory: containerDef.GetIntProperty("Memory", 128), - Essential: containerDef.GetBoolProperty("Essential", false), - Privileged: containerDef.GetBoolProperty("Privileged", false), + Name: containerDef.GetStringProperty("Name"), + Image: containerDef.GetStringProperty("Image"), + CPU: containerDef.GetIntProperty("Cpu"), + Memory: containerDef.GetIntProperty("Memory"), + Essential: containerDef.GetBoolProperty("Essential"), + Privileged: containerDef.GetBoolProperty("Privileged"), Environment: envVars, PortMappings: nil, } @@ -60,7 +60,7 @@ func getContainerDefinitions(r *parser2.Resource) ([]ecs.ContainerDefinition, er return definitions, nil } -func getVolumes(r *parser2.Resource) (volumes []ecs.Volume) { +func getVolumes(r *parser.Resource) (volumes []ecs.Volume) { volumesList := r.GetProperty("Volumes") if volumesList.IsNil() || volumesList.IsNotList() { @@ -76,7 +76,7 @@ func getVolumes(r *parser2.Resource) (volumes []ecs.Volume) { }, } transitProp := v.GetProperty("EFSVolumeConfiguration.TransitEncryption") - if transitProp.IsNotNil() && transitProp.EqualTo("enabled", parser2.IgnoreCase) { + if transitProp.IsNotNil() && transitProp.EqualTo("enabled", parser.IgnoreCase) { volume.EFSVolumeConfiguration.TransitEncryptionEnabled = types.Bool(true, transitProp.Metadata()) } diff --git a/pkg/iac/adapters/cloudformation/aws/efs/efs_test.go b/pkg/iac/adapters/cloudformation/aws/efs/efs_test.go new file mode 100644 index 000000000000..a22d769020b6 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/efs/efs_test.go @@ -0,0 +1,52 @@ +package efs + +import ( + "testing" + + 
"github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/efs" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected efs.EFS + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + FileSystemResource: + Type: 'AWS::EFS::FileSystem' + Properties: + Encrypted: true +`, + expected: efs.EFS{ + FileSystems: []efs.FileSystem{ + { + Encrypted: types.BoolTest(true), + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + FileSystemResource: + Type: 'AWS::EFS::FileSystem' + `, + expected: efs.EFS{ + FileSystems: []efs.FileSystem{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/eks/cluster.go b/pkg/iac/adapters/cloudformation/aws/eks/cluster.go index 07adedf06c21..c960924e33d4 100644 --- a/pkg/iac/adapters/cloudformation/aws/eks/cluster.go +++ b/pkg/iac/adapters/cloudformation/aws/eks/cluster.go @@ -2,30 +2,21 @@ package eks import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/eks" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getClusters(ctx parser2.FileContext) (clusters []eks.Cluster) { +func getClusters(ctx parser.FileContext) (clusters []eks.Cluster) { clusterResources := ctx.GetResourcesByType("AWS::EKS::Cluster") for _, r := range clusterResources { cluster := eks.Cluster{ - Metadata: r.Metadata(), - // Logging not supported for cloudformation https://github.com/aws/containers-roadmap/issues/242 - Logging: eks.Logging{ - Metadata: r.Metadata(), - API: iacTypes.BoolUnresolvable(r.Metadata()), - Audit: iacTypes.BoolUnresolvable(r.Metadata()), - Authenticator: iacTypes.BoolUnresolvable(r.Metadata()), - ControllerManager: iacTypes.BoolUnresolvable(r.Metadata()), - Scheduler: iacTypes.BoolUnresolvable(r.Metadata()), - }, - Encryption: getEncryptionConfig(r), - // endpoint protection not supported - https://github.com/aws/containers-roadmap/issues/242 - PublicAccessEnabled: iacTypes.BoolUnresolvable(r.Metadata()), - PublicAccessCIDRs: nil, + Metadata: r.Metadata(), + Logging: getLogging(r), + Encryption: getEncryptionConfig(r), + PublicAccessEnabled: r.GetBoolProperty("ResourcesVpcConfig.EndpointPublicAccess"), + PublicAccessCIDRs: getPublicCIDRs(r), } clusters = append(clusters, cluster) @@ -33,24 +24,71 @@ func getClusters(ctx parser2.FileContext) (clusters []eks.Cluster) { return clusters } -func getEncryptionConfig(r *parser2.Resource) eks.Encryption { +func getPublicCIDRs(r *parser.Resource) []iacTypes.StringValue { + publicAccessCidrs := r.GetProperty("ResourcesVpcConfig.PublicAccessCidrs") + if publicAccessCidrs.IsNotList() { + return nil + } + + var cidrs []iacTypes.StringValue + for _, el := range publicAccessCidrs.AsList() { + cidrs = append(cidrs, el.AsStringValue()) + } + + return cidrs +} + +func getEncryptionConfig(r *parser.Resource) eks.Encryption { + + encryptionConfigs := r.GetProperty("EncryptionConfig") + if encryptionConfigs.IsNotList() { + return eks.Encryption{ + Metadata: r.Metadata(), + } + } + + for _, encryptionConfig := range encryptionConfigs.AsList() { + resources := 
encryptionConfig.GetProperty("Resources") + hasSecrets := resources.IsList() && resources.Contains("secrets") + return eks.Encryption{ + Metadata: encryptionConfig.Metadata(), + KMSKeyID: encryptionConfig.GetStringProperty("Provider.KeyArn"), + Secrets: iacTypes.Bool(hasSecrets, resources.Metadata()), + } + } - encryption := eks.Encryption{ + return eks.Encryption{ Metadata: r.Metadata(), - Secrets: iacTypes.BoolDefault(false, r.Metadata()), - KMSKeyID: iacTypes.StringDefault("", r.Metadata()), - } - - if encProp := r.GetProperty("EncryptionConfig"); encProp.IsNotNil() { - encryption.Metadata = encProp.Metadata() - encryption.KMSKeyID = encProp.GetStringProperty("Provider.KeyArn") - resourcesProp := encProp.GetProperty("Resources") - if resourcesProp.IsList() { - if resourcesProp.Contains("secrets") { - encryption.Secrets = iacTypes.Bool(true, resourcesProp.Metadata()) - } + } +} + +func getLogging(r *parser.Resource) eks.Logging { + enabledTypes := r.GetProperty("Logging.ClusterLogging.EnabledTypes") + if enabledTypes.IsNotList() { + return eks.Logging{ + Metadata: r.Metadata(), } } - return encryption + logging := eks.Logging{ + Metadata: enabledTypes.Metadata(), + } + + for _, typeConf := range enabledTypes.AsList() { + switch typ := typeConf.GetProperty("Type"); typ.AsString() { + case "api": + logging.API = iacTypes.Bool(true, typ.Metadata()) + case "audit": + logging.Audit = iacTypes.Bool(true, typ.Metadata()) + case "authenticator": + logging.Authenticator = iacTypes.Bool(true, typ.Metadata()) + case "controllerManager": + logging.ControllerManager = iacTypes.Bool(true, typ.Metadata()) + case "scheduler": + logging.Scheduler = iacTypes.Bool(true, typ.Metadata()) + } + + } + + return logging } diff --git a/pkg/iac/adapters/cloudformation/aws/eks/eks_test.go b/pkg/iac/adapters/cloudformation/aws/eks/eks_test.go new file mode 100644 index 000000000000..36981f6bf544 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/eks/eks_test.go @@ -0,0 +1,81 @@ +package eks + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/eks" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected eks.EKS + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + EKSCluster: + Type: AWS::EKS::Cluster + Properties: + Logging: + ClusterLogging: + EnabledTypes: + - Type: api + - Type: audit + - Type: authenticator + - Type: controllerManager + - Type: scheduler + EncryptionConfig: + - Provider: + KeyArn: alias/mykey + Resources: [secrets] + ResourcesVpcConfig: + EndpointPublicAccess: True + PublicAccessCidrs: + - 0.0.0.0/0 +`, + expected: eks.EKS{ + Clusters: []eks.Cluster{ + { + Logging: eks.Logging{ + API: types.BoolTest(true), + Audit: types.BoolTest(true), + Authenticator: types.BoolTest(true), + ControllerManager: types.BoolTest(true), + Scheduler: types.BoolTest(true), + }, + Encryption: eks.Encryption{ + KMSKeyID: types.StringTest("alias/mykey"), + Secrets: types.BoolTest(true), + }, + PublicAccessEnabled: types.BoolTest(true), + PublicAccessCIDRs: []types.StringValue{ + types.StringTest("0.0.0.0/0"), + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + EKSCluster: + Type: AWS::EKS::Cluster + `, + expected: eks.EKS{ + Clusters: []eks.Cluster{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t 
*testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/elasticache/elasticache_test.go b/pkg/iac/adapters/cloudformation/aws/elasticache/elasticache_test.go new file mode 100644 index 000000000000..e7e3d018b14c --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/elasticache/elasticache_test.go @@ -0,0 +1,82 @@ +package elasticache + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/elasticache" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected elasticache.ElastiCache + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + ElasticacheCluster: + Type: 'AWS::ElastiCache::CacheCluster' + Properties: + Engine: memcached + CacheNodeType: cache.t2.micro + SnapshotRetentionLimit: 5 + myReplicationGroup: + Type: 'AWS::ElastiCache::ReplicationGroup' + Properties: + TransitEncryptionEnabled: true + AtRestEncryptionEnabled: true + mySecGroup: + Type: AWS::ElastiCache::SecurityGroup + Properties: + Description: test +`, + expected: elasticache.ElastiCache{ + Clusters: []elasticache.Cluster{ + { + Engine: types.StringTest("memcached"), + NodeType: types.StringTest("cache.t2.micro"), + SnapshotRetentionLimit: types.IntTest(5), + }, + }, + ReplicationGroups: []elasticache.ReplicationGroup{ + { + TransitEncryptionEnabled: types.BoolTest(true), + AtRestEncryptionEnabled: types.BoolTest(true), + }, + }, + SecurityGroups: []elasticache.SecurityGroup{ + { + Description: types.StringTest("test"), + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + ElasticacheCluster: + Type: 'AWS::ElastiCache::CacheCluster' + myReplicationGroup: + Type: 'AWS::ElastiCache::ReplicationGroup' + mySecGroup: + Type: AWS::ElastiCache::SecurityGroup + `, + expected: elasticache.ElastiCache{ + Clusters: []elasticache.Cluster{{}}, + ReplicationGroups: []elasticache.ReplicationGroup{{}}, + SecurityGroups: []elasticache.SecurityGroup{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/elasticsearch/domain.go b/pkg/iac/adapters/cloudformation/aws/elasticsearch/domain.go index 5ff46bc41cbd..2ca77a5d7448 100644 --- a/pkg/iac/adapters/cloudformation/aws/elasticsearch/domain.go +++ b/pkg/iac/adapters/cloudformation/aws/elasticsearch/domain.go @@ -13,11 +13,10 @@ func getDomains(ctx parser.FileContext) (domains []elasticsearch.Domain) { for _, r := range domainResources { domain := elasticsearch.Domain{ - Metadata: r.Metadata(), - DomainName: r.GetStringProperty("DomainName"), - AccessPolicies: r.GetStringProperty("AccessPolicies"), - DedicatedMasterEnabled: r.GetBoolProperty("ElasticsearchClusterConfig.DedicatedMasterEnabled"), - VpcId: iacTypes.String("", r.Metadata()), + Metadata: r.Metadata(), + DomainName: r.GetStringProperty("DomainName"), + AccessPolicies: r.GetStringProperty("AccessPolicies"), + VpcId: iacTypes.String("", r.Metadata()), LogPublishing: elasticsearch.LogPublishing{ Metadata: r.Metadata(), AuditEnabled: iacTypes.BoolDefault(false, r.Metadata()), @@ -35,7 +34,6 @@ func getDomains(ctx parser.FileContext) (domains []elasticsearch.Domain) { Endpoint: elasticsearch.Endpoint{ Metadata: r.Metadata(), 
EnforceHTTPS: iacTypes.BoolDefault(false, r.Metadata()), - TLSPolicy: iacTypes.StringDefault("Policy-Min-TLS-1-0-2019-07", r.Metadata()), }, ServiceSoftwareOptions: elasticsearch.ServiceSoftwareOptions{ Metadata: r.Metadata(), @@ -46,25 +44,31 @@ func getDomains(ctx parser.FileContext) (domains []elasticsearch.Domain) { }, } + if r.Type() == "AWS::OpenSearchService::Domain" { + domain.DedicatedMasterEnabled = r.GetBoolProperty("ClusterConfig.DedicatedMasterEnabled") + } else { + domain.DedicatedMasterEnabled = r.GetBoolProperty("ElasticsearchClusterConfig.DedicatedMasterEnabled") + } + if prop := r.GetProperty("LogPublishingOptions"); prop.IsNotNil() { domain.LogPublishing = elasticsearch.LogPublishing{ Metadata: prop.Metadata(), - AuditEnabled: prop.GetBoolProperty("AUDIT_LOGS.Enabled", false), - CloudWatchLogGroupArn: prop.GetStringProperty("CloudWatchLogsLogGroupArn"), + AuditEnabled: prop.GetBoolProperty("AUDIT_LOGS.Enabled"), + CloudWatchLogGroupArn: prop.GetStringProperty("AUDIT_LOGS.CloudWatchLogsLogGroupArn"), } } if prop := r.GetProperty("NodeToNodeEncryptionOptions"); prop.IsNotNil() { domain.TransitEncryption = elasticsearch.TransitEncryption{ Metadata: prop.Metadata(), - Enabled: prop.GetBoolProperty("Enabled", false), + Enabled: prop.GetBoolProperty("Enabled"), } } if prop := r.GetProperty("EncryptionAtRestOptions"); prop.IsNotNil() { domain.AtRestEncryption = elasticsearch.AtRestEncryption{ Metadata: prop.Metadata(), - Enabled: prop.GetBoolProperty("Enabled", false), + Enabled: prop.GetBoolProperty("Enabled"), KmsKeyId: prop.GetStringProperty("KmsKeyId"), } } @@ -72,8 +76,8 @@ func getDomains(ctx parser.FileContext) (domains []elasticsearch.Domain) { if prop := r.GetProperty("DomainEndpointOptions"); prop.IsNotNil() { domain.Endpoint = elasticsearch.Endpoint{ Metadata: prop.Metadata(), - EnforceHTTPS: prop.GetBoolProperty("EnforceHTTPS", false), - TLSPolicy: prop.GetStringProperty("TLSSecurityPolicy", "Policy-Min-TLS-1-0-2019-07"), + EnforceHTTPS: prop.GetBoolProperty("EnforceHTTPS"), + TLSPolicy: prop.GetStringProperty("TLSSecurityPolicy"), } } diff --git a/pkg/iac/adapters/cloudformation/aws/elasticsearch/elasticsearch_test.go b/pkg/iac/adapters/cloudformation/aws/elasticsearch/elasticsearch_test.go new file mode 100644 index 000000000000..afb9c3a81e22 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/elasticsearch/elasticsearch_test.go @@ -0,0 +1,109 @@ +package elasticsearch + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/elasticsearch" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected elasticsearch.Elasticsearch + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + OpenSearchServiceDomain: + Type: AWS::OpenSearchService::Domain + Properties: + DomainName: 'test' + ClusterConfig: + DedicatedMasterEnabled: true + NodeToNodeEncryptionOptions: + Enabled: true + EncryptionAtRestOptions: + Enabled: true + KmsKeyId: mykey + DomainEndpointOptions: + EnforceHTTPS: true + TLSSecurityPolicy: Policy-Min-TLS-1-0-2019-07 + AccessPolicies: + Version: '2012-10-17' + Statement: + - + Effect: 'Allow' + Principal: + AWS: 'arn:aws:iam::123456789012:user/opensearch-user' + Action: 'es:*' + Resource: 'arn:aws:es:us-east-1:846973539254:domain/test/*' + LogPublishingOptions: + AUDIT_LOGS: + CloudWatchLogsLogGroupArn: 
'arn:aws:logs:us-east-1:123456789012:log-group:/aws/opensearch/domains/opensearch-application-logs' + Enabled: true +`, + expected: elasticsearch.Elasticsearch{ + Domains: []elasticsearch.Domain{ + { + DomainName: types.StringTest("test"), + DedicatedMasterEnabled: types.BoolTest(true), + LogPublishing: elasticsearch.LogPublishing{ + AuditEnabled: types.BoolTest(true), + CloudWatchLogGroupArn: types.StringTest("arn:aws:logs:us-east-1:123456789012:log-group:/aws/opensearch/domains/opensearch-application-logs"), + }, + TransitEncryption: elasticsearch.TransitEncryption{ + Enabled: types.BoolTest(true), + }, + AtRestEncryption: elasticsearch.AtRestEncryption{ + Enabled: types.BoolTest(true), + KmsKeyId: types.StringTest("mykey"), + }, + Endpoint: elasticsearch.Endpoint{ + EnforceHTTPS: types.BoolTest(true), + TLSPolicy: types.StringTest("Policy-Min-TLS-1-0-2019-07"), + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + OpenSearchServiceDomain: + Type: AWS::OpenSearchService::Domain + `, + expected: elasticsearch.Elasticsearch{ + Domains: []elasticsearch.Domain{{}}, + }, + }, + { + name: "Elasticsearch", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + ElasticsearchDomain: + Type: AWS::Elasticsearch::Domain + Properties: + ElasticsearchClusterConfig: + DedicatedMasterEnabled: true + `, + expected: elasticsearch.Elasticsearch{ + Domains: []elasticsearch.Domain{ + { + DedicatedMasterEnabled: types.BoolTest(true), + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/elb/adapt_test.go b/pkg/iac/adapters/cloudformation/aws/elb/adapt_test.go index ca8fd631fa39..2c5ee494e66d 100644 --- a/pkg/iac/adapters/cloudformation/aws/elb/adapt_test.go +++ b/pkg/iac/adapters/cloudformation/aws/elb/adapt_test.go @@ -1,14 +1,11 @@ package elb import ( - "context" "testing" - "github.com/aquasecurity/trivy/internal/testutil" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" "github.com/aquasecurity/trivy/pkg/iac/providers/aws/elb" - "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" - "github.com/stretchr/testify/require" ) func TestAdapt(t *testing.T) { @@ -18,7 +15,7 @@ func TestAdapt(t *testing.T) { expected elb.ELB }{ { - name: "LoadBalancer", + name: "complete", source: `AWSTemplateFormatVersion: "2010-09-09" Resources: LoadBalancer: @@ -27,6 +24,7 @@ Resources: - ALBLogsBucketPermission Properties: Name: "k8s-dev" + Scheme: internal IpAddressType: ipv4 LoadBalancerAttributes: - Key: routing.http2.enabled @@ -43,13 +41,36 @@ Resources: - Key: elbv2.k8s.aws/cluster Value: "biomage-dev" Type: application + Listener: + Type: AWS::ElasticLoadBalancingV2::Listener + Properties: + DefaultActions: + - Type: 'redirect' + RedirectConfig: + Port: 443 + Protocol: HTTPS + StatusCode: HTTP_302 + LoadBalancerArn: !Ref LoadBalancer + Protocol: HTTPS + SslPolicy: "ELBSecurityPolicy-TLS-1-2-2017-01" `, expected: elb.ELB{ LoadBalancers: []elb.LoadBalancer{ { - Metadata: types.NewTestMetadata(), - Type: types.String("application", types.NewTestMetadata()), - DropInvalidHeaderFields: types.Bool(true, types.NewTestMetadata()), + Type: types.StringTest("application"), + DropInvalidHeaderFields: types.BoolTest(true), + Internal: types.Bool(true, types.NewTestMetadata()), + Listeners: []elb.Listener{ + { + Protocol: 
types.StringTest("HTTPS"), + TLSPolicy: types.StringTest("ELBSecurityPolicy-TLS-1-2-2017-01"), + DefaultActions: []elb.Action{ + { + Type: types.StringTest("redirect"), + }, + }, + }, + }, }, }, }, @@ -58,15 +79,7 @@ Resources: for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - fs := testutil.CreateFS(t, map[string]string{ - "template.yaml": tt.source, - }) - - p := parser.New() - fctx, err := p.ParseFile(context.TODO(), fs, "template.yaml") - require.NoError(t, err) - - testutil.AssertDefsecEqual(t, tt.expected, Adapt(*fctx)) + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) }) } } diff --git a/pkg/iac/adapters/cloudformation/aws/elb/loadbalancer.go b/pkg/iac/adapters/cloudformation/aws/elb/loadbalancer.go index 50b8f26275d5..002b6487ba43 100644 --- a/pkg/iac/adapters/cloudformation/aws/elb/loadbalancer.go +++ b/pkg/iac/adapters/cloudformation/aws/elb/loadbalancer.go @@ -2,11 +2,11 @@ package elb import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/elb" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getLoadBalancers(ctx parser2.FileContext) (loadbalancers []elb.LoadBalancer) { +func getLoadBalancers(ctx parser.FileContext) (loadbalancers []elb.LoadBalancer) { loadBalanacerResources := ctx.GetResourcesByType("AWS::ElasticLoadBalancingV2::LoadBalancer") @@ -24,7 +24,7 @@ func getLoadBalancers(ctx parser2.FileContext) (loadbalancers []elb.LoadBalancer return loadbalancers } -func getListeners(lbr *parser2.Resource, ctx parser2.FileContext) (listeners []elb.Listener) { +func getListeners(lbr *parser.Resource, ctx parser.FileContext) (listeners []elb.Listener) { listenerResources := ctx.GetResourcesByType("AWS::ElasticLoadBalancingV2::Listener") @@ -43,7 +43,7 @@ func getListeners(lbr *parser2.Resource, ctx parser2.FileContext) (listeners []e return listeners } -func getDefaultListenerActions(r *parser2.Resource) (actions []elb.Action) { +func getDefaultListenerActions(r *parser.Resource) (actions []elb.Action) { defaultActionsProp := r.GetProperty("DefaultActions") if defaultActionsProp.IsNotList() { return actions @@ -57,15 +57,15 @@ func getDefaultListenerActions(r *parser2.Resource) (actions []elb.Action) { return actions } -func isInternal(r *parser2.Resource) types.BoolValue { +func isInternal(r *parser.Resource) types.BoolValue { schemeProp := r.GetProperty("Scheme") if schemeProp.IsNotString() { return r.BoolDefault(false) } - return types.Bool(schemeProp.EqualTo("internal", parser2.IgnoreCase), schemeProp.Metadata()) + return types.Bool(schemeProp.EqualTo("internal", parser.IgnoreCase), schemeProp.Metadata()) } -func checkForDropInvalidHeaders(r *parser2.Resource) types.BoolValue { +func checkForDropInvalidHeaders(r *parser.Resource) types.BoolValue { attributesProp := r.GetProperty("LoadBalancerAttributes") if attributesProp.IsNotList() { return types.BoolDefault(false, r.Metadata()) diff --git a/pkg/iac/adapters/cloudformation/aws/iam/iam_test.go b/pkg/iac/adapters/cloudformation/aws/iam/iam_test.go new file mode 100644 index 000000000000..3e548dec0cf7 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/iam/iam_test.go @@ -0,0 +1,189 @@ +package iam + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/iam" + "github.com/aquasecurity/trivy/pkg/iac/types" + "github.com/liamg/iamgo" +) + 
+func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected iam.IAM + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + myIAMPolicy: + Type: 'AWS::IAM::Policy' + Properties: + PolicyName: TestPolicy + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Action: + - 'cloudformation:Describe*' + Resource: '*' + Groups: + - !Ref MyGroup + Users: + - !Ref PublishUser + Roles: + - !Ref MyRole + MyGroup: + Type: AWS::IAM::Group + Properties: + GroupName: TestGroup + Policies: + - PolicyName: TestGroupPolicy + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Resource: arn:*:cloudfront::*:distribution/* + Action: + - cloudfront:CreateDistribution + MyUser: + Type: AWS::IAM::User + Properties: + UserName: TestUser + Policies: + - PolicyName: TestUserPolicy + PolicyDocument: + Statement: + - Action: 's3:*' + Effect: Allow + Resource: + - 'arn:aws:s3:::testbucket' + MyRole: + Type: 'AWS::IAM::Role' + Properties: + RoleName: TestRole + Policies: + - PolicyName: TestRolePolicy + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Action: + - 'sts:AssumeRole' + AccessKey: + Type: AWS::IAM::AccessKey + Properties: + UserName: !Ref MyUser + Status: Active +`, + expected: iam.IAM{ + Policies: []iam.Policy{ + { + Name: types.StringTest("TestPolicy"), + Document: func() iam.Document { + return iam.Document{ + Parsed: iamgo.NewPolicyBuilder(). + WithVersion("2012-10-17"). + WithStatement( + iamgo.NewStatementBuilder(). + WithEffect("Allow"). + WithActions([]string{"cloudformation:Describe*"}). + WithResources([]string{"*"}). + Build(), + ). + Build(), + } + }(), + }, + }, + Users: []iam.User{ + { + Name: types.StringTest("TestUser"), + Policies: []iam.Policy{ + { + Name: types.StringTest("TestUserPolicy"), + Document: func() iam.Document { + return iam.Document{ + Parsed: iamgo.NewPolicyBuilder(). + WithStatement( + iamgo.NewStatementBuilder(). + WithEffect("Allow"). + WithActions([]string{"s3:*"}). + WithResources([]string{"arn:aws:s3:::testbucket"}). + Build(), + ). + Build(), + } + }(), + }, + }, + }, + }, + Groups: []iam.Group{ + { + Name: types.StringTest("TestGroup"), + Policies: []iam.Policy{ + { + Name: types.StringTest("TestGroupPolicy"), + Document: func() iam.Document { + return iam.Document{ + Parsed: iamgo.NewPolicyBuilder(). + WithVersion("2012-10-17"). + WithStatement( + iamgo.NewStatementBuilder(). + WithEffect("Allow"). + WithActions([]string{"cloudfront:CreateDistribution"}). + WithResources([]string{"arn:*:cloudfront::*:distribution/*"}). + Build(), + ). + Build(), + } + }(), + }, + }, + }, + }, + Roles: []iam.Role{ + { + Name: types.StringTest("TestRole"), + Policies: []iam.Policy{ + { + Name: types.StringTest("TestRolePolicy"), + Document: func() iam.Document { + return iam.Document{ + Parsed: iamgo.NewPolicyBuilder(). + WithVersion("2012-10-17"). + WithStatement( + iamgo.NewStatementBuilder(). + WithEffect("Allow"). + WithActions([]string{"sts:AssumeRole"}). + Build(), + ). 
+ Build(), + } + }(), + }, + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + + `, + expected: iam.IAM{}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/iam/policy.go b/pkg/iac/adapters/cloudformation/aws/iam/policy.go index 9843c8cdaa43..f83771f882d2 100644 --- a/pkg/iac/adapters/cloudformation/aws/iam/policy.go +++ b/pkg/iac/adapters/cloudformation/aws/iam/policy.go @@ -4,11 +4,11 @@ import ( "github.com/liamg/iamgo" "github.com/aquasecurity/trivy/pkg/iac/providers/aws/iam" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getPolicies(ctx parser2.FileContext) (policies []iam.Policy) { +func getPolicies(ctx parser.FileContext) (policies []iam.Policy) { for _, policyResource := range ctx.GetResourcesByType("AWS::IAM::Policy") { policy := iam.Policy{ @@ -34,7 +34,7 @@ func getPolicies(ctx parser2.FileContext) (policies []iam.Policy) { return policies } -func getRoles(ctx parser2.FileContext) (roles []iam.Role) { +func getRoles(ctx parser.FileContext) (roles []iam.Role) { for _, roleResource := range ctx.GetResourcesByType("AWS::IAM::Role") { policyProp := roleResource.GetProperty("Policies") roleName := roleResource.GetStringProperty("RoleName") @@ -48,10 +48,10 @@ func getRoles(ctx parser2.FileContext) (roles []iam.Role) { return roles } -func getUsers(ctx parser2.FileContext) (users []iam.User) { +func getUsers(ctx parser.FileContext) (users []iam.User) { for _, userResource := range ctx.GetResourcesByType("AWS::IAM::User") { policyProp := userResource.GetProperty("Policies") - userName := userResource.GetStringProperty("GroupName") + userName := userResource.GetStringProperty("UserName") users = append(users, iam.User{ Metadata: userResource.Metadata(), @@ -64,7 +64,8 @@ func getUsers(ctx parser2.FileContext) (users []iam.User) { return users } -func getAccessKeys(ctx parser2.FileContext, username string) (accessKeys []iam.AccessKey) { +func getAccessKeys(ctx parser.FileContext, username string) (accessKeys []iam.AccessKey) { + // TODO: also search for a key by the logical id of the resource for _, keyResource := range ctx.GetResourcesByType("AWS::IAM::AccessKey") { keyUsername := keyResource.GetStringProperty("UserName") if !keyUsername.EqualTo(username) { @@ -86,7 +87,7 @@ func getAccessKeys(ctx parser2.FileContext, username string) (accessKeys []iam.A return accessKeys } -func getGroups(ctx parser2.FileContext) (groups []iam.Group) { +func getGroups(ctx parser.FileContext) (groups []iam.Group) { for _, groupResource := range ctx.GetResourcesByType("AWS::IAM::Group") { policyProp := groupResource.GetProperty("Policies") groupName := groupResource.GetStringProperty("GroupName") @@ -100,7 +101,7 @@ func getGroups(ctx parser2.FileContext) (groups []iam.Group) { return groups } -func getPoliciesDocs(policiesProp *parser2.Property) []iam.Policy { +func getPoliciesDocs(policiesProp *parser.Property) []iam.Policy { var policies []iam.Policy for _, policy := range policiesProp.AsList() { diff --git a/pkg/iac/adapters/cloudformation/aws/kinesis/kinesis_test.go b/pkg/iac/adapters/cloudformation/aws/kinesis/kinesis_test.go new file mode 100644 index 000000000000..ce38afadf806 --- /dev/null +++ 
b/pkg/iac/adapters/cloudformation/aws/kinesis/kinesis_test.go @@ -0,0 +1,57 @@ +package kinesis + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/kinesis" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected kinesis.Kinesis + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + MyStream: + Type: 'AWS::Kinesis::Stream' + Properties: + StreamEncryption: + EncryptionType: KMS + KeyId: key +`, + expected: kinesis.Kinesis{ + Streams: []kinesis.Stream{ + { + Encryption: kinesis.Encryption{ + Type: types.StringTest("KMS"), + KMSKeyID: types.StringTest("key"), + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + MyStream: + Type: 'AWS::Kinesis::Stream' + `, + expected: kinesis.Kinesis{ + Streams: []kinesis.Stream{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/kinesis/stream.go b/pkg/iac/adapters/cloudformation/aws/kinesis/stream.go index b2bc8bac3411..6c10ec6134c0 100644 --- a/pkg/iac/adapters/cloudformation/aws/kinesis/stream.go +++ b/pkg/iac/adapters/cloudformation/aws/kinesis/stream.go @@ -3,7 +3,6 @@ package kinesis import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/kinesis" "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" - "github.com/aquasecurity/trivy/pkg/iac/types" ) func getStreams(ctx parser.FileContext) (streams []kinesis.Stream) { @@ -14,11 +13,6 @@ func getStreams(ctx parser.FileContext) (streams []kinesis.Stream) { stream := kinesis.Stream{ Metadata: r.Metadata(), - Encryption: kinesis.Encryption{ - Metadata: r.Metadata(), - Type: types.StringDefault("KMS", r.Metadata()), - KMSKeyID: types.StringDefault("", r.Metadata()), - }, } if prop := r.GetProperty("StreamEncryption"); prop.IsNotNil() { diff --git a/pkg/iac/adapters/cloudformation/aws/lambda/function.go b/pkg/iac/adapters/cloudformation/aws/lambda/function.go index 02bde4b903ff..f91a565d193a 100644 --- a/pkg/iac/adapters/cloudformation/aws/lambda/function.go +++ b/pkg/iac/adapters/cloudformation/aws/lambda/function.go @@ -2,29 +2,24 @@ package lambda import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/lambda" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" - "github.com/aquasecurity/trivy/pkg/iac/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -func getFunctions(ctx parser2.FileContext) (functions []lambda.Function) { +func getFunctions(ctx parser.FileContext) (functions []lambda.Function) { functionResources := ctx.GetResourcesByType("AWS::Lambda::Function") for _, r := range functionResources { function := lambda.Function{ - Metadata: r.Metadata(), - Tracing: lambda.Tracing{ - Metadata: r.Metadata(), - Mode: types.StringDefault("PassThrough", r.Metadata()), - }, + Metadata: r.Metadata(), Permissions: getPermissions(r, ctx), } if prop := r.GetProperty("TracingConfig"); prop.IsNotNil() { function.Tracing = lambda.Tracing{ Metadata: prop.Metadata(), - Mode: prop.GetStringProperty("Mode", "PassThrough"), + Mode: prop.GetStringProperty("Mode"), } } @@ -34,7 +29,7 @@ func getFunctions(ctx parser2.FileContext) (functions []lambda.Function) { return functions } -func 
getPermissions(funcR *parser2.Resource, ctx parser2.FileContext) (perms []lambda.Permission) { +func getPermissions(funcR *parser.Resource, ctx parser.FileContext) (perms []lambda.Permission) { permissionResources := ctx.GetResourcesByType("AWS::Lambda::Permission") diff --git a/pkg/iac/adapters/cloudformation/aws/lambda/lambda_test.go b/pkg/iac/adapters/cloudformation/aws/lambda/lambda_test.go new file mode 100644 index 000000000000..4262181f89ee --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/lambda/lambda_test.go @@ -0,0 +1,76 @@ +package lambda + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/lambda" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected lambda.Lambda + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + lambdaFunction: + Type: AWS::Lambda::Function + Properties: + TracingConfig: + Mode: Active + permission: + Type: AWS::Lambda::Permission + Properties: + FunctionName: !Ref lambdaFunction + Action: lambda:InvokeFunction + Principal: s3.amazonaws.com + SourceArn: arn +`, + expected: lambda.Lambda{ + Functions: []lambda.Function{ + { + Tracing: lambda.Tracing{ + Mode: types.StringTest("Active"), + }, + Permissions: []lambda.Permission{ + { + Principal: types.StringTest("s3.amazonaws.com"), + SourceARN: types.StringTest("arn"), + }, + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + lambdaFunction: + Type: AWS::Lambda::Function + permission: + Type: AWS::Lambda::Permission + Properties: + FunctionName: !Ref lambdaFunction + `, + expected: lambda.Lambda{ + Functions: []lambda.Function{ + { + Permissions: []lambda.Permission{{}}, + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/mq/mq_test.go b/pkg/iac/adapters/cloudformation/aws/mq/mq_test.go new file mode 100644 index 000000000000..b4f1d5048898 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/mq/mq_test.go @@ -0,0 +1,59 @@ +package mq + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/mq" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected mq.MQ + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + BasicBroker: + Type: "AWS::AmazonMQ::Broker" + Properties: + PubliclyAccessible: true + Logs: + Audit: true + General: true +`, + expected: mq.MQ{ + Brokers: []mq.Broker{ + { + PublicAccess: types.BoolTest(true), + Logging: mq.Logging{ + Audit: types.BoolTest(true), + General: types.BoolTest(true), + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + BasicBroker: + Type: "AWS::AmazonMQ::Broker" + `, + expected: mq.MQ{ + Brokers: []mq.Broker{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/msk/msk_test.go b/pkg/iac/adapters/cloudformation/aws/msk/msk_test.go new file mode 100644 index 000000000000..2cc5a6cd0945 --- 
/dev/null +++ b/pkg/iac/adapters/cloudformation/aws/msk/msk_test.go @@ -0,0 +1,87 @@ +package msk + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/msk" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected msk.MSK + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + cluster: + Type: AWS::MSK::Cluster + Properties: + EncryptionInfo: + EncryptionInTransit: + ClientBroker: 'PLAINTEXT' + EncryptionAtRest: + DataVolumeKMSKeyId: key + LoggingInfo: + BrokerLogs: + S3: + Enabled: true + CloudWatchLogs: + Enabled: true + Firehose: + Enabled: true +`, + expected: msk.MSK{ + Clusters: []msk.Cluster{ + { + EncryptionInTransit: msk.EncryptionInTransit{ + ClientBroker: types.StringTest("PLAINTEXT"), + }, + EncryptionAtRest: msk.EncryptionAtRest{ + KMSKeyARN: types.StringTest("key"), + Enabled: types.BoolTest(true), + }, + Logging: msk.Logging{ + Broker: msk.BrokerLogging{ + S3: msk.S3Logging{ + Enabled: types.BoolTest(true), + }, + Firehose: msk.FirehoseLogging{ + Enabled: types.BoolTest(true), + }, + Cloudwatch: msk.CloudwatchLogging{ + Enabled: types.BoolTest(true), + }, + }, + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + cluster: + Type: AWS::MSK::Cluster + `, + expected: msk.MSK{ + Clusters: []msk.Cluster{{ + EncryptionInTransit: msk.EncryptionInTransit{ + ClientBroker: types.StringTest("TLS"), + }, + }}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/neptune/cluster.go b/pkg/iac/adapters/cloudformation/aws/neptune/cluster.go index 3685a655aee2..33012cbd2236 100644 --- a/pkg/iac/adapters/cloudformation/aws/neptune/cluster.go +++ b/pkg/iac/adapters/cloudformation/aws/neptune/cluster.go @@ -2,11 +2,11 @@ package neptune import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/neptune" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getClusters(ctx parser2.FileContext) (clusters []neptune.Cluster) { +func getClusters(ctx parser.FileContext) (clusters []neptune.Cluster) { for _, r := range ctx.GetResourcesByType("AWS::Neptune::DBCluster") { cluster := neptune.Cluster{ @@ -23,7 +23,7 @@ func getClusters(ctx parser2.FileContext) (clusters []neptune.Cluster) { return clusters } -func getAuditLog(r *parser2.Resource) types.BoolValue { +func getAuditLog(r *parser.Resource) types.BoolValue { if logsProp := r.GetProperty("EnableCloudwatchLogsExports"); logsProp.IsList() { if logsProp.Contains("audit") { return types.Bool(true, logsProp.Metadata()) diff --git a/pkg/iac/adapters/cloudformation/aws/neptune/neptune_test.go b/pkg/iac/adapters/cloudformation/aws/neptune/neptune_test.go new file mode 100644 index 000000000000..8e63a481ff2e --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/neptune/neptune_test.go @@ -0,0 +1,59 @@ +package neptune + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/neptune" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests 
:= []struct { + name string + source string + expected neptune.Neptune + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + cluster: + Type: AWS::Neptune::DBCluster + Properties: + StorageEncrypted: true + KmsKeyId: key + EnableCloudwatchLogsExports: + - audit +`, + expected: neptune.Neptune{ + Clusters: []neptune.Cluster{ + { + StorageEncrypted: types.BoolTest(true), + KMSKeyID: types.StringTest("key"), + Logging: neptune.Logging{ + Audit: types.BoolTest(true), + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + cluster: + Type: AWS::Neptune::DBCluster + `, + expected: neptune.Neptune{ + Clusters: []neptune.Cluster{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/rds/adapt_test.go b/pkg/iac/adapters/cloudformation/aws/rds/adapt_test.go index 7685c3118a0e..4875395c8506 100644 --- a/pkg/iac/adapters/cloudformation/aws/rds/adapt_test.go +++ b/pkg/iac/adapters/cloudformation/aws/rds/adapt_test.go @@ -1,14 +1,11 @@ package rds import ( - "context" "testing" - "github.com/aquasecurity/trivy/internal/testutil" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" "github.com/aquasecurity/trivy/pkg/iac/providers/aws/rds" - "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" - "github.com/stretchr/testify/require" ) func TestAdapt(t *testing.T) { @@ -18,7 +15,7 @@ func TestAdapt(t *testing.T) { expected rds.RDS }{ { - name: "cluster with instances", + name: "complete", source: `AWSTemplateFormatVersion: 2010-09-09 Resources: RDSCluster: @@ -65,92 +62,113 @@ Resources: Properties: Description: "CloudFormation Sample MySQL Parameter Group" DBParameterGroupName: "testgroup" + Parameters: + sql_mode: IGNORE_SPACE + DbSecurityByEC2SecurityGroup: + Type: AWS::RDS::DBSecurityGroup + Properties: + GroupDescription: "Ingress for Amazon EC2 security group" `, expected: rds.RDS{ + Classic: rds.Classic{ + DBSecurityGroups: []rds.DBSecurityGroup{{}}, + }, ParameterGroups: []rds.ParameterGroups{ { - Metadata: types.NewTestMetadata(), - DBParameterGroupName: types.String("testgroup", types.NewTestMetadata()), + DBParameterGroupName: types.StringTest("testgroup"), }, }, Clusters: []rds.Cluster{ { - Metadata: types.NewTestMetadata(), - BackupRetentionPeriodDays: types.Int(2, types.NewTestMetadata()), - Engine: types.String("aurora-postgresql", types.NewTestMetadata()), + BackupRetentionPeriodDays: types.IntTest(2), + Engine: types.StringTest("aurora-postgresql"), Encryption: rds.Encryption{ - EncryptStorage: types.Bool(true, types.NewTestMetadata()), - KMSKeyID: types.String("your-kms-key-id", types.NewTestMetadata()), + EncryptStorage: types.BoolTest(true), + KMSKeyID: types.StringTest("your-kms-key-id"), }, PerformanceInsights: rds.PerformanceInsights{ - Metadata: types.NewTestMetadata(), - Enabled: types.Bool(true, types.NewTestMetadata()), - KMSKeyID: types.String("test-kms-key-id", types.NewTestMetadata()), + Enabled: types.BoolTest(true), + KMSKeyID: types.StringTest("test-kms-key-id"), }, - PublicAccess: types.Bool(false, types.NewTestMetadata()), - DeletionProtection: types.Bool(true, types.NewTestMetadata()), + PublicAccess: types.BoolTest(false), + DeletionProtection: types.BoolTest(true), Instances: []rds.ClusterInstance{ { Instance: rds.Instance{ - Metadata: 
types.NewTestMetadata(),
- StorageEncrypted: types.Bool(true, types.NewTestMetadata()),
+ StorageEncrypted: types.BoolTest(true),
 Encryption: rds.Encryption{
- EncryptStorage: types.Bool(true, types.NewTestMetadata()),
- KMSKeyID: types.String("your-kms-key-id", types.NewTestMetadata()),
+ EncryptStorage: types.BoolTest(true),
+ KMSKeyID: types.StringTest("your-kms-key-id"),
 },
- DBInstanceIdentifier: types.String("test", types.NewTestMetadata()),
- PubliclyAccessible: types.Bool(false, types.NewTestMetadata()),
- PublicAccess: types.BoolDefault(false, types.NewTestMetadata()),
- BackupRetentionPeriodDays: types.IntDefault(1, types.NewTestMetadata()),
- Engine: types.StringDefault("aurora-mysql", types.NewTestMetadata()),
- EngineVersion: types.String("5.7.12", types.NewTestMetadata()),
- MultiAZ: types.Bool(true, types.NewTestMetadata()),
- AutoMinorVersionUpgrade: types.Bool(true, types.NewTestMetadata()),
- DBInstanceArn: types.String("arn:aws:rds:us-east-2:123456789012:db:my-mysql-instance-1", types.NewTestMetadata()),
- IAMAuthEnabled: types.Bool(true, types.NewTestMetadata()),
+ DBInstanceIdentifier: types.StringTest("test"),
+ PubliclyAccessible: types.BoolTest(false),
+ PublicAccess: types.BoolTest(false),
+ BackupRetentionPeriodDays: types.IntTest(1),
+ Engine: types.StringTest("aurora-mysql"),
+ EngineVersion: types.StringTest("5.7.12"),
+ MultiAZ: types.BoolTest(true),
+ AutoMinorVersionUpgrade: types.BoolTest(true),
+ DBInstanceArn: types.StringTest("arn:aws:rds:us-east-2:123456789012:db:my-mysql-instance-1"),
+ IAMAuthEnabled: types.BoolTest(true),
 PerformanceInsights: rds.PerformanceInsights{
- Metadata: types.NewTestMetadata(),
- Enabled: types.Bool(true, types.NewTestMetadata()),
- KMSKeyID: types.String("test-kms-key-id2", types.NewTestMetadata()),
+ Enabled: types.BoolTest(true),
+ KMSKeyID: types.StringTest("test-kms-key-id2"),
 },
 EnabledCloudwatchLogsExports: []types.StringValue{
- types.String("error", types.NewTestMetadata()),
- types.String("general", types.NewTestMetadata()),
+ types.StringTest("error"),
+ types.StringTest("general"),
 },
 DBParameterGroups: []rds.DBParameterGroupsList{
 {
- DBParameterGroupName: types.String("testgroup", types.NewTestMetadata()),
+ DBParameterGroupName: types.StringTest("testgroup"),
 },
 },
 TagList: []rds.TagList{
- {
- Metadata: types.NewTestMetadata(),
- },
- {
- Metadata: types.NewTestMetadata(),
- },
+ {},
+ {},
 },
 },
- ClusterIdentifier: types.String("RDSCluster", types.NewTestMetadata()),
+ ClusterIdentifier: types.StringTest("RDSCluster"),
 },
 },
 },
 },
 },
 },
+ {
+ name: "empty",
+ source: `AWSTemplateFormatVersion: 2010-09-09
+Resources:
+  RDSCluster:
+    Type: 'AWS::RDS::DBCluster'
+  RDSDBInstance1:
+    Type: 'AWS::RDS::DBInstance'
+  RDSDBParameterGroup:
+    Type: 'AWS::RDS::DBParameterGroup'
+  DbSecurityByEC2SecurityGroup:
+    Type: AWS::RDS::DBSecurityGroup
+`,
+ expected: rds.RDS{
+ Classic: rds.Classic{
+ DBSecurityGroups: []rds.DBSecurityGroup{{}},
+ },
+ ParameterGroups: []rds.ParameterGroups{{}},
+ Clusters: []rds.Cluster{{
+ Engine: types.StringTest("aurora"),
+ BackupRetentionPeriodDays: types.IntTest(1),
+ }},
+ Instances: []rds.Instance{{
+ BackupRetentionPeriodDays: types.IntTest(1),
+ PublicAccess: types.BoolTest(true),
+ DBParameterGroups: []rds.DBParameterGroupsList{{}},
+ }},
+ },
+ },
 }
 for _, tt := range tests {
 t.Run(tt.name, func(t *testing.T) {
- fs := testutil.CreateFS(t, map[string]string{
- "template.yaml": tt.source,
- })
-
- p := parser.New()
- fctx, err := p.ParseFile(context.TODO(), fs, "template.yaml")
- 
require.NoError(t, err)
-
- testutil.AssertDefsecEqual(t, tt.expected, Adapt(*fctx))
+ testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt)
 })
 }
diff --git a/pkg/iac/adapters/cloudformation/aws/rds/instance.go b/pkg/iac/adapters/cloudformation/aws/rds/instance.go
index 6b4a39e7acf7..256eada02aac 100644
--- a/pkg/iac/adapters/cloudformation/aws/rds/instance.go
+++ b/pkg/iac/adapters/cloudformation/aws/rds/instance.go
@@ -2,11 +2,11 @@ package rds
 import (
 "github.com/aquasecurity/trivy/pkg/iac/providers/aws/rds"
- parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser"
+ "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser"
 "github.com/aquasecurity/trivy/pkg/iac/types"
 )
-func getClustersAndInstances(ctx parser2.FileContext) ([]rds.Cluster, []rds.Instance) {
+func getClustersAndInstances(ctx parser.FileContext) ([]rds.Cluster, []rds.Instance) {
 clusterMap := getClusters(ctx)
@@ -68,12 +68,15 @@ func getClustersAndInstances(ctx parser2.FileContext) ([]rds.Cluster, []rds.Inst
 return clusters, orphans
 }
-func getDBParameterGroups(ctx parser2.FileContext, r *parser2.Resource) (dbParameterGroup []rds.DBParameterGroupsList) {
+func getDBParameterGroups(ctx parser.FileContext, r *parser.Resource) (dbParameterGroup []rds.DBParameterGroupsList) {
+
+ var parameterGroupList []rds.DBParameterGroupsList
 dbParameterGroupName := r.GetStringProperty("DBParameterGroupName")
 for _, r := range ctx.GetResourcesByType("AWS::RDS::DBParameterGroup") {
 name := r.GetStringProperty("DBParameterGroupName")
+ // TODO: find by resource logical id
 if !dbParameterGroupName.EqualTo(name.Value()) {
 continue
 }
@@ -82,13 +85,13 @@ func getDBParameterGroups(ctx parser2.FileContext, r *parser2.Resource) (dbParam
 DBParameterGroupName: name,
 KMSKeyID: types.StringUnresolvable(r.Metadata()),
 }
- dbParameterGroup = append(dbParameterGroup, dbpmgl)
+ parameterGroupList = append(parameterGroupList, dbpmgl)
 }
- return dbParameterGroup
+ return parameterGroupList
 }
-func getEnabledCloudwatchLogsExports(r *parser2.Resource) (enabledcloudwatchlogexportslist []types.StringValue) {
+func getEnabledCloudwatchLogsExports(r *parser.Resource) (enabledcloudwatchlogexportslist []types.StringValue) {
 enabledCloudwatchLogExportList := r.GetProperty("EnableCloudwatchLogsExports")
 if enabledCloudwatchLogExportList.IsNil() || enabledCloudwatchLogExportList.IsNotList() {
@@ -101,7 +104,7 @@ func getEnabledCloudwatchLogsExports(r *parser2.Resource) (enabledcloudwatchloge
 return enabledcloudwatchlogexportslist
 }
-func getTagList(r *parser2.Resource) (taglist []rds.TagList) {
+func getTagList(r *parser.Resource) (taglist []rds.TagList) {
 tagLists := r.GetProperty("Tags")
 if tagLists.IsNil() || tagLists.IsNotList() {
@@ -116,7 +119,7 @@ func getTagList(r *parser2.Resource) (taglist []rds.TagList) {
 return taglist
 }
-func getReadReplicaDBInstanceIdentifiers(r *parser2.Resource) (readreplicadbidentifier []types.StringValue) {
+func getReadReplicaDBInstanceIdentifiers(r *parser.Resource) (readreplicadbidentifier []types.StringValue) {
 readReplicaDBIdentifier := r.GetProperty("SourceDBInstanceIdentifier")
 if readReplicaDBIdentifier.IsNil() || readReplicaDBIdentifier.IsNotList() {
diff --git a/pkg/iac/adapters/cloudformation/aws/rds/parameter_groups.go b/pkg/iac/adapters/cloudformation/aws/rds/parameter_groups.go
index 98df5187401b..f47c2f70a706 100644
--- a/pkg/iac/adapters/cloudformation/aws/rds/parameter_groups.go
+++ b/pkg/iac/adapters/cloudformation/aws/rds/parameter_groups.go
@@ -2,11 +2,11 @@ package rds
import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/rds" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getParameterGroups(ctx parser2.FileContext) (parametergroups []rds.ParameterGroups) { +func getParameterGroups(ctx parser.FileContext) (parametergroups []rds.ParameterGroups) { for _, r := range ctx.GetResourcesByType("AWS::RDS::DBParameterGroup") { @@ -23,10 +23,12 @@ func getParameterGroups(ctx parser2.FileContext) (parametergroups []rds.Paramete return parametergroups } -func getParameters(r *parser2.Resource) (parameters []rds.Parameters) { +func getParameters(r *parser.Resource) (parameters []rds.Parameters) { dBParam := r.GetProperty("Parameters") + // TODO: parameters is JSON + // https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbparametergroup.html#cfn-rds-dbparametergroup-parameters if dBParam.IsNil() || dBParam.IsNotList() { return parameters } diff --git a/pkg/iac/adapters/cloudformation/aws/redshift/cluster.go b/pkg/iac/adapters/cloudformation/aws/redshift/cluster.go index 6aac98978b94..c8acf8997af0 100644 --- a/pkg/iac/adapters/cloudformation/aws/redshift/cluster.go +++ b/pkg/iac/adapters/cloudformation/aws/redshift/cluster.go @@ -3,7 +3,6 @@ package redshift import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/redshift" "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" - "github.com/aquasecurity/trivy/pkg/iac/types" ) func getClusters(ctx parser.FileContext) (clusters []redshift.Cluster) { @@ -12,14 +11,12 @@ func getClusters(ctx parser.FileContext) (clusters []redshift.Cluster) { cluster := redshift.Cluster{ Metadata: r.Metadata(), ClusterIdentifier: r.GetStringProperty("ClusterIdentifier"), - AllowVersionUpgrade: r.GetBoolProperty("AllowVersionUpgrade"), + AllowVersionUpgrade: r.GetBoolProperty("AllowVersionUpgrade", true), NodeType: r.GetStringProperty("NodeType"), - NumberOfNodes: r.GetIntProperty("NumberOfNodes"), + NumberOfNodes: r.GetIntProperty("NumberOfNodes", 1), PubliclyAccessible: r.GetBoolProperty("PubliclyAccessible"), MasterUsername: r.GetStringProperty("MasterUsername"), - VpcId: types.String("", r.Metadata()), - LoggingEnabled: types.Bool(false, r.Metadata()), - AutomatedSnapshotRetentionPeriod: r.GetIntProperty("AutomatedSnapshotRetentionPeriod"), + AutomatedSnapshotRetentionPeriod: r.GetIntProperty("AutomatedSnapshotRetentionPeriod", 1), Encryption: redshift.Encryption{ Metadata: r.Metadata(), Enabled: r.GetBoolProperty("Encrypted"), @@ -38,10 +35,8 @@ func getClusters(ctx parser.FileContext) (clusters []redshift.Cluster) { } func getParameters(ctx parser.FileContext) (parameter []redshift.ClusterParameter) { - - paraRes := ctx.GetResourcesByType("AWS::Redshift::ClusterParameterGroup") var parameters []redshift.ClusterParameter - for _, r := range paraRes { + for _, r := range ctx.GetResourcesByType("AWS::Redshift::ClusterParameterGroup") { for _, par := range r.GetProperty("Parameters").AsList() { parameters = append(parameters, redshift.ClusterParameter{ Metadata: par.Metadata(), diff --git a/pkg/iac/adapters/cloudformation/aws/redshift/redshift_test.go b/pkg/iac/adapters/cloudformation/aws/redshift/redshift_test.go new file mode 100644 index 000000000000..a14117a21961 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/redshift/redshift_test.go @@ -0,0 +1,111 @@ +package redshift + +import ( + "testing" + + 
"github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/redshift" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected redshift.Redshift + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + myCluster: + Type: "AWS::Redshift::Cluster" + Properties: + DBName: "mydb" + ClusterIdentifier: myexamplecluster + AllowVersionUpgrade: false + MasterUsername: "master" + NodeType: "ds2.xlarge" + NumberOfNodes: 2 + PubliclyAccessible: true + AutomatedSnapshotRetentionPeriod: 2 + Encrypted: true + KmsKeyId: key + Endpoint: + Port: 2000 + ClusterSubnetGroupName: test + myClusterParameterGroup: + Type: "AWS::Redshift::ClusterParameterGroup" + Properties: + Parameters: + - + ParameterName: "enable_user_activity_logging" + ParameterValue: "true" + mySecGroup: + Type: AWS::Redshift::ClusterSecurityGroup + Properties: + Description: test + `, + expected: redshift.Redshift{ + Clusters: []redshift.Cluster{ + { + ClusterIdentifier: types.StringTest("myexamplecluster"), + AllowVersionUpgrade: types.BoolTest(false), + MasterUsername: types.StringTest("master"), + NodeType: types.StringTest("ds2.xlarge"), + NumberOfNodes: types.IntTest(2), + PubliclyAccessible: types.BoolTest(true), + AutomatedSnapshotRetentionPeriod: types.IntTest(2), + Encryption: redshift.Encryption{ + Enabled: types.BoolTest(true), + KMSKeyID: types.StringTest("key"), + }, + EndPoint: redshift.EndPoint{ + Port: types.IntTest(2000), + }, + SubnetGroupName: types.StringTest("test"), + }, + }, + ClusterParameters: []redshift.ClusterParameter{ + { + ParameterName: types.StringTest("enable_user_activity_logging"), + ParameterValue: types.StringTest("true"), + }, + }, + SecurityGroups: []redshift.SecurityGroup{ + { + Description: types.StringTest("test"), + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + myCluster: + Type: "AWS::Redshift::Cluster" + mySecGroup: + Type: AWS::Redshift::ClusterSecurityGroup + myClusterParameterGroup: + Type: "AWS::Redshift::ClusterParameterGroup" +`, + expected: redshift.Redshift{ + Clusters: []redshift.Cluster{ + { + AllowVersionUpgrade: types.BoolTest(true), + AutomatedSnapshotRetentionPeriod: types.IntTest(1), + NumberOfNodes: types.IntTest(1), + }, + }, + SecurityGroups: []redshift.SecurityGroup{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/s3/bucket.go b/pkg/iac/adapters/cloudformation/aws/s3/bucket.go index 35b95520fd9c..5f5329fc6714 100644 --- a/pkg/iac/adapters/cloudformation/aws/s3/bucket.go +++ b/pkg/iac/adapters/cloudformation/aws/s3/bucket.go @@ -50,16 +50,17 @@ func getBuckets(cfFile parser.FileContext) []s3.Bucket { } func getPublicAccessBlock(r *parser.Resource) *s3.PublicAccessBlock { - if block := r.GetProperty("PublicAccessBlockConfiguration"); block.IsNil() { + block := r.GetProperty("PublicAccessBlockConfiguration") + if block.IsNil() { return nil } return &s3.PublicAccessBlock{ - Metadata: r.Metadata(), - BlockPublicACLs: r.GetBoolProperty("PublicAccessBlockConfiguration.BlockPublicAcls"), - BlockPublicPolicy: r.GetBoolProperty("PublicAccessBlockConfiguration.BlockPublicPolicy"), - IgnorePublicACLs: r.GetBoolProperty("PublicAccessBlockConfiguration.IgnorePublicAcls"), - 
RestrictPublicBuckets: r.GetBoolProperty("PublicAccessBlockConfiguration.RestrictPublicBuckets"), + Metadata: block.Metadata(), + BlockPublicACLs: block.GetBoolProperty("BlockPublicAcls"), + BlockPublicPolicy: block.GetBoolProperty("BlockPublicPolicy"), + IgnorePublicACLs: block.GetBoolProperty("IgnorePublicAcls"), + RestrictPublicBuckets: block.GetBoolProperty("RestrictPublicBuckets"), } } diff --git a/pkg/iac/adapters/cloudformation/aws/s3/s3_test.go b/pkg/iac/adapters/cloudformation/aws/s3/s3_test.go index f4139c7ad15b..ee8fffb39f75 100644 --- a/pkg/iac/adapters/cloudformation/aws/s3/s3_test.go +++ b/pkg/iac/adapters/cloudformation/aws/s3/s3_test.go @@ -1,14 +1,11 @@ package s3 import ( - "context" "testing" - "github.com/aquasecurity/trivy/internal/testutil" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" "github.com/aquasecurity/trivy/pkg/iac/providers/aws/s3" - "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" - "github.com/stretchr/testify/require" ) func TestAdapt(t *testing.T) { @@ -56,36 +53,44 @@ Resources: ExpirationInDays: 365 AccelerateConfiguration: AccelerationStatus: Enabled + VersioningConfiguration: + Status: Enabled + WebsiteConfiguration: + IndexDocument: index.html `, expected: s3.S3{ Buckets: []s3.Bucket{ { - Name: types.String("logging-bucket", types.NewTestMetadata()), + Name: types.StringTest("logging-bucket"), }, { - Name: types.String("test-bucket", types.NewTestMetadata()), + Name: types.StringTest("test-bucket"), Encryption: s3.Encryption{ - Enabled: types.Bool(true, types.NewTestMetadata()), - Algorithm: types.String("aws:kms", types.NewTestMetadata()), - KMSKeyId: types.String("Key", types.NewTestMetadata()), + Enabled: types.BoolTest(true), + Algorithm: types.StringTest("aws:kms"), + KMSKeyId: types.StringTest("Key"), }, - ACL: types.String("aws-exec-read", types.NewTestMetadata()), + ACL: types.StringTest("aws-exec-read"), PublicAccessBlock: &s3.PublicAccessBlock{ - BlockPublicACLs: types.Bool(true, types.NewTestMetadata()), - BlockPublicPolicy: types.Bool(true, types.NewTestMetadata()), - IgnorePublicACLs: types.Bool(true, types.NewTestMetadata()), - RestrictPublicBuckets: types.Bool(true, types.NewTestMetadata()), + BlockPublicACLs: types.BoolTest(true), + BlockPublicPolicy: types.BoolTest(true), + IgnorePublicACLs: types.BoolTest(true), + RestrictPublicBuckets: types.BoolTest(true), }, Logging: s3.Logging{ - TargetBucket: types.String("LoggingBucket", types.NewTestMetadata()), - Enabled: types.Bool(true, types.NewTestMetadata()), + TargetBucket: types.StringTest("LoggingBucket"), + Enabled: types.BoolTest(true), }, LifecycleConfiguration: []s3.Rules{ { - Status: types.String("Enabled", types.NewTestMetadata()), + Status: types.StringTest("Enabled"), }, }, - AccelerateConfigurationStatus: types.String("Enabled", types.NewTestMetadata()), + AccelerateConfigurationStatus: types.StringTest("Enabled"), + Versioning: s3.Versioning{ + Enabled: types.BoolTest(true), + }, + Website: &s3.Website{}, }, }, }, @@ -101,7 +106,7 @@ Resources: expected: s3.S3{ Buckets: []s3.Bucket{ { - Name: types.String("test-bucket", types.NewTestMetadata()), + Name: types.StringTest("test-bucket"), Encryption: s3.Encryption{ Enabled: types.BoolDefault(false, types.NewTestMetadata()), }, @@ -126,11 +131,11 @@ Resources: expected: s3.S3{ Buckets: []s3.Bucket{ { - Name: types.String("test-bucket", types.NewTestMetadata()), + Name: types.StringTest("test-bucket"), Encryption: s3.Encryption{ 
Enabled: types.BoolDefault(false, types.NewTestMetadata()), - KMSKeyId: types.String("alias/my-key", types.NewTestMetadata()), - Algorithm: types.String("aes256", types.NewTestMetadata()), + KMSKeyId: types.StringTest("alias/my-key"), + Algorithm: types.StringTest("aes256"), }, }, }, @@ -140,16 +145,7 @@ Resources: for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - - fsys := testutil.CreateFS(t, map[string]string{ - "main.yaml": tt.source, - }) - - fctx, err := parser.New().ParseFile(context.TODO(), fsys, "main.yaml") - require.NoError(t, err) - - adapted := Adapt(*fctx) - testutil.AssertDefsecEqual(t, tt.expected, adapted) + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) }) } diff --git a/pkg/iac/adapters/cloudformation/aws/sam/api.go b/pkg/iac/adapters/cloudformation/aws/sam/api.go index d42010166914..4d4f04e6e83a 100644 --- a/pkg/iac/adapters/cloudformation/aws/sam/api.go +++ b/pkg/iac/adapters/cloudformation/aws/sam/api.go @@ -2,11 +2,11 @@ package sam import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/sam" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getApis(cfFile parser2.FileContext) (apis []sam.API) { +func getApis(cfFile parser.FileContext) (apis []sam.API) { apiResources := cfFile.GetResourcesByType("AWS::Serverless::Api") for _, r := range apiResources { @@ -25,7 +25,7 @@ func getApis(cfFile parser2.FileContext) (apis []sam.API) { return apis } -func getRestMethodSettings(r *parser2.Resource) sam.RESTMethodSettings { +func getRestMethodSettings(r *parser.Resource) sam.RESTMethodSettings { settings := sam.RESTMethodSettings{ Metadata: r.Metadata(), @@ -35,6 +35,8 @@ func getRestMethodSettings(r *parser2.Resource) sam.RESTMethodSettings { MetricsEnabled: iacTypes.BoolDefault(false, r.Metadata()), } + // TODO: MethodSettings is list + // https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-apigateway-stage.html#cfn-apigateway-stage-methodsettings settingsProp := r.GetProperty("MethodSettings") if settingsProp.IsNotNil() { @@ -47,7 +49,7 @@ func getRestMethodSettings(r *parser2.Resource) sam.RESTMethodSettings { } if loggingLevel := settingsProp.GetProperty("LoggingLevel"); loggingLevel.IsNotNil() { - if loggingLevel.EqualTo("OFF", parser2.IgnoreCase) { + if loggingLevel.EqualTo("OFF", parser.IgnoreCase) { settings.LoggingEnabled = iacTypes.Bool(false, loggingLevel.Metadata()) } else { settings.LoggingEnabled = iacTypes.Bool(true, loggingLevel.Metadata()) @@ -58,7 +60,7 @@ func getRestMethodSettings(r *parser2.Resource) sam.RESTMethodSettings { return settings } -func getAccessLogging(r *parser2.Resource) sam.AccessLogging { +func getAccessLogging(r *parser.Resource) sam.AccessLogging { logging := sam.AccessLogging{ Metadata: r.Metadata(), @@ -75,19 +77,17 @@ func getAccessLogging(r *parser2.Resource) sam.AccessLogging { return logging } -func getDomainConfiguration(r *parser2.Resource) sam.DomainConfiguration { +func getDomainConfiguration(r *parser.Resource) sam.DomainConfiguration { domainConfig := sam.DomainConfiguration{ - Metadata: r.Metadata(), - Name: iacTypes.StringDefault("", r.Metadata()), - SecurityPolicy: iacTypes.StringDefault("TLS_1_0", r.Metadata()), + Metadata: r.Metadata(), } if domain := r.GetProperty("Domain"); domain.IsNotNil() { domainConfig = sam.DomainConfiguration{ Metadata: domain.Metadata(), - Name: 
domain.GetStringProperty("DomainName", ""), - SecurityPolicy: domain.GetStringProperty("SecurityPolicy", "TLS_1_0"), + Name: domain.GetStringProperty("DomainName"), + SecurityPolicy: domain.GetStringProperty("SecurityPolicy"), } } diff --git a/pkg/iac/adapters/cloudformation/aws/sam/function.go b/pkg/iac/adapters/cloudformation/aws/sam/function.go index f6f2cfd747a6..161b078bf681 100644 --- a/pkg/iac/adapters/cloudformation/aws/sam/function.go +++ b/pkg/iac/adapters/cloudformation/aws/sam/function.go @@ -5,18 +5,18 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/iam" "github.com/aquasecurity/trivy/pkg/iac/providers/aws/sam" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getFunctions(cfFile parser2.FileContext) (functions []sam.Function) { +func getFunctions(cfFile parser.FileContext) (functions []sam.Function) { functionResources := cfFile.GetResourcesByType("AWS::Serverless::Function") for _, r := range functionResources { function := sam.Function{ Metadata: r.Metadata(), FunctionName: r.GetStringProperty("FunctionName"), - Tracing: r.GetStringProperty("Tracing", sam.TracingModePassThrough), + Tracing: r.GetStringProperty("Tracing"), ManagedPolicies: nil, Policies: nil, } @@ -28,7 +28,7 @@ func getFunctions(cfFile parser2.FileContext) (functions []sam.Function) { return functions } -func setFunctionPolicies(r *parser2.Resource, function *sam.Function) { +func setFunctionPolicies(r *parser.Resource, function *sam.Function) { policies := r.GetProperty("Policies") if policies.IsNotNil() { if policies.IsString() { diff --git a/pkg/iac/adapters/cloudformation/aws/sam/http_api.go b/pkg/iac/adapters/cloudformation/aws/sam/http_api.go index c51c3efb8913..02f9ba6c5ef9 100644 --- a/pkg/iac/adapters/cloudformation/aws/sam/http_api.go +++ b/pkg/iac/adapters/cloudformation/aws/sam/http_api.go @@ -2,11 +2,11 @@ package sam import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/sam" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getHttpApis(cfFile parser2.FileContext) (apis []sam.HttpAPI) { +func getHttpApis(cfFile parser.FileContext) (apis []sam.HttpAPI) { apiResources := cfFile.GetResourcesByType("AWS::Serverless::HttpApi") for _, r := range apiResources { @@ -24,7 +24,7 @@ func getHttpApis(cfFile parser2.FileContext) (apis []sam.HttpAPI) { return apis } -func getAccessLoggingV2(r *parser2.Resource) sam.AccessLogging { +func getAccessLoggingV2(r *parser.Resource) sam.AccessLogging { logging := sam.AccessLogging{ Metadata: r.Metadata(), @@ -41,7 +41,7 @@ func getAccessLoggingV2(r *parser2.Resource) sam.AccessLogging { return logging } -func getRouteSettings(r *parser2.Resource) sam.RouteSettings { +func getRouteSettings(r *parser.Resource) sam.RouteSettings { routeSettings := sam.RouteSettings{ Metadata: r.Metadata(), @@ -52,7 +52,8 @@ func getRouteSettings(r *parser2.Resource) sam.RouteSettings { if route := r.GetProperty("DefaultRouteSettings"); route.IsNotNil() { routeSettings = sam.RouteSettings{ - Metadata: route.Metadata(), + Metadata: route.Metadata(), + // TODO: LoggingLevel is string LoggingEnabled: route.GetBoolProperty("LoggingLevel"), DataTraceEnabled: route.GetBoolProperty("DataTraceEnabled"), DetailedMetricsEnabled: 
route.GetBoolProperty("DetailedMetricsEnabled"), diff --git a/pkg/iac/adapters/cloudformation/aws/sam/sam_test.go b/pkg/iac/adapters/cloudformation/aws/sam/sam_test.go new file mode 100644 index 000000000000..ec2fed201ea6 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/sam/sam_test.go @@ -0,0 +1,213 @@ +package sam + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/iam" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/sam" + "github.com/aquasecurity/trivy/pkg/iac/types" + "github.com/liamg/iamgo" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected sam.SAM + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + ApiGatewayApi: + Type: AWS::Serverless::Api + Properties: + StageName: prod + Name: test + TracingEnabled: true + Domain: + DomainName: domain + SecurityPolicy: "TLS_1_2" + MethodSettings: + - DataTraceEnabled: true + CacheDataEncrypted: true + MetricsEnabled: true + LoggingLevel: INFO + AccessLogSetting: + DestinationArn: 'arn:aws:logs:us-east-1:123456789:log-group:my-log-group' + HttpApi: + Type: AWS::Serverless::HttpApi + Properties: + Name: test + Domain: + DomainName: test + SecurityPolicy: "TLS_1_2" + AccessLogSettings: + DestinationArn: 'arn:aws:logs:us-east-1:123456789:log-group:my-log-group' + DefaultRouteSettings: + LoggingLevel: INFO + DataTraceEnabled: true + DetailedMetricsEnabled: true + myFunction: + Type: AWS::Serverless::Function + Properties: + FunctionName: test + Tracing: Active + Policies: + - AWSLambdaExecute + - Version: '2012-10-17' + Statement: + - Effect: Allow + Action: + - s3:GetObject + Resource: 'arn:aws:s3:::my-bucket/*' + MySampleStateMachine: + Type: AWS::Serverless::StateMachine + Properties: + Logging: + Level: ALL + Tracing: + Enabled: true + Policies: + - Version: "2012-10-17" + Statement: + - Effect: Allow + Action: + - "cloudwatch:*" + Resource: "*" + myTable: + Type: AWS::Serverless::SimpleTable + Properties: + TableName: my-table + SSESpecification: + SSEEnabled: "true" + KMSMasterKeyId: "kmskey" +`, + expected: sam.SAM{ + APIs: []sam.API{ + { + Name: types.StringTest("test"), + TracingEnabled: types.BoolTest(true), + DomainConfiguration: sam.DomainConfiguration{ + Name: types.StringTest("domain"), + SecurityPolicy: types.StringTest("TLS_1_2"), + }, + AccessLogging: sam.AccessLogging{ + CloudwatchLogGroupARN: types.StringTest("arn:aws:logs:us-east-1:123456789:log-group:my-log-group"), + }, + }, + }, + HttpAPIs: []sam.HttpAPI{ + { + Name: types.StringTest("test"), + DomainConfiguration: sam.DomainConfiguration{ + Name: types.StringTest("test"), + SecurityPolicy: types.StringTest("TLS_1_2"), + }, + AccessLogging: sam.AccessLogging{ + CloudwatchLogGroupARN: types.StringTest("arn:aws:logs:us-east-1:123456789:log-group:my-log-group"), + }, + DefaultRouteSettings: sam.RouteSettings{ + DataTraceEnabled: types.BoolTest(true), + DetailedMetricsEnabled: types.BoolTest(true), + }, + }, + }, + Functions: []sam.Function{ + { + FunctionName: types.StringTest("test"), + Tracing: types.StringTest("Active"), + Policies: []iam.Policy{ + { + Document: func() iam.Document { + return iam.Document{ + Parsed: iamgo.NewPolicyBuilder(). + WithVersion("2012-10-17"). + WithStatement( + iamgo.NewStatementBuilder(). + WithEffect("Allow"). + WithActions([]string{"s3:GetObject"}). + WithResources([]string{"arn:aws:s3:::my-bucket/*"}). + Build(), + ). 
+ Build(), + } + }(), + }, + }, + ManagedPolicies: []types.StringValue{ + types.StringTest("AWSLambdaExecute"), + }, + }, + }, + StateMachines: []sam.StateMachine{ + { + LoggingConfiguration: sam.LoggingConfiguration{ + LoggingEnabled: types.BoolTest(true), + }, + Tracing: sam.TracingConfiguration{ + Enabled: types.BoolTest(true), + }, + Policies: []iam.Policy{ + { + Document: func() iam.Document { + return iam.Document{ + Parsed: iamgo.NewPolicyBuilder(). + WithVersion("2012-10-17"). + WithStatement( + iamgo.NewStatementBuilder(). + WithEffect("Allow"). + WithActions([]string{"cloudwatch:*"}). + WithResources([]string{"*"}). + Build(), + ). + Build(), + } + }(), + }, + }, + }, + }, + SimpleTables: []sam.SimpleTable{ + { + TableName: types.StringTest("my-table"), + SSESpecification: sam.SSESpecification{ + Enabled: types.BoolTest(true), + KMSMasterKeyID: types.StringTest("kmskey"), + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + ApiGatewayApi: + Type: AWS::Serverless::Api + HttpApi: + Type: AWS::Serverless::HttpApi + myFunction: + Type: AWS::Serverless::Function + MySampleStateMachine: + Type: AWS::Serverless::StateMachine + myTable: + Type: AWS::Serverless::SimpleTable +`, + expected: sam.SAM{ + APIs: []sam.API{{}}, + HttpAPIs: []sam.HttpAPI{{}}, + Functions: []sam.Function{{}}, + StateMachines: []sam.StateMachine{{}}, + SimpleTables: []sam.SimpleTable{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/sam/state_machines.go b/pkg/iac/adapters/cloudformation/aws/sam/state_machines.go index efcaf3772be4..2a57afd2bdb6 100644 --- a/pkg/iac/adapters/cloudformation/aws/sam/state_machines.go +++ b/pkg/iac/adapters/cloudformation/aws/sam/state_machines.go @@ -5,11 +5,11 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/iam" "github.com/aquasecurity/trivy/pkg/iac/providers/aws/sam" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getStateMachines(cfFile parser2.FileContext) (stateMachines []sam.StateMachine) { +func getStateMachines(cfFile parser.FileContext) (stateMachines []sam.StateMachine) { stateMachineResources := cfFile.GetResourcesByType("AWS::Serverless::StateMachine") for _, r := range stateMachineResources { @@ -25,6 +25,7 @@ func getStateMachines(cfFile parser2.FileContext) (stateMachines []sam.StateMach Tracing: getTracingConfiguration(r), } + // TODO: By default, the level is set to OFF if logging := r.GetProperty("Logging"); logging.IsNotNil() { stateMachine.LoggingConfiguration.Metadata = logging.Metadata() if level := logging.GetProperty("Level"); level.IsNotNil() { @@ -39,7 +40,7 @@ func getStateMachines(cfFile parser2.FileContext) (stateMachines []sam.StateMach return stateMachines } -func getTracingConfiguration(r *parser2.Resource) sam.TracingConfiguration { +func getTracingConfiguration(r *parser.Resource) sam.TracingConfiguration { tracing := r.GetProperty("Tracing") if tracing.IsNil() { return sam.TracingConfiguration{ @@ -54,7 +55,7 @@ func getTracingConfiguration(r *parser2.Resource) sam.TracingConfiguration { } } -func setStateMachinePolicies(r *parser2.Resource, stateMachine *sam.StateMachine) { +func setStateMachinePolicies(r *parser.Resource, stateMachine *sam.StateMachine) 
{ policies := r.GetProperty("Policies") if policies.IsNotNil() { if policies.IsString() { diff --git a/pkg/iac/adapters/cloudformation/aws/sam/tables.go b/pkg/iac/adapters/cloudformation/aws/sam/tables.go index 713f723bf319..89e66acdf514 100644 --- a/pkg/iac/adapters/cloudformation/aws/sam/tables.go +++ b/pkg/iac/adapters/cloudformation/aws/sam/tables.go @@ -2,11 +2,11 @@ package sam import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/sam" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getSimpleTables(cfFile parser2.FileContext) (tables []sam.SimpleTable) { +func getSimpleTables(cfFile parser.FileContext) (tables []sam.SimpleTable) { tableResources := cfFile.GetResourcesByType("AWS::Serverless::SimpleTable") for _, r := range tableResources { @@ -22,21 +22,18 @@ func getSimpleTables(cfFile parser2.FileContext) (tables []sam.SimpleTable) { return tables } -func getSSESpecification(r *parser2.Resource) sam.SSESpecification { - - spec := sam.SSESpecification{ - Metadata: r.Metadata(), - Enabled: iacTypes.BoolDefault(false, r.Metadata()), - KMSMasterKeyID: iacTypes.StringDefault("", r.Metadata()), - } - +func getSSESpecification(r *parser.Resource) sam.SSESpecification { if sse := r.GetProperty("SSESpecification"); sse.IsNotNil() { - spec = sam.SSESpecification{ + return sam.SSESpecification{ Metadata: sse.Metadata(), Enabled: sse.GetBoolProperty("SSEEnabled"), - KMSMasterKeyID: sse.GetStringProperty("KMSMasterKeyID"), + KMSMasterKeyID: sse.GetStringProperty("KMSMasterKeyId"), } } - return spec + return sam.SSESpecification{ + Metadata: r.Metadata(), + Enabled: iacTypes.BoolDefault(false, r.Metadata()), + KMSMasterKeyID: iacTypes.StringDefault("", r.Metadata()), + } } diff --git a/pkg/iac/adapters/cloudformation/aws/sns/sns_test.go b/pkg/iac/adapters/cloudformation/aws/sns/sns_test.go new file mode 100644 index 000000000000..25f271db2073 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/sns/sns_test.go @@ -0,0 +1,54 @@ +package sns + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/sns" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected sns.SNS + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + MySNSTopic: + Type: AWS::SNS::Topic + Properties: + KmsMasterKeyId: mykey +`, + expected: sns.SNS{ + Topics: []sns.Topic{ + { + Encryption: sns.Encryption{ + KMSKeyID: types.StringTest("mykey"), + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + MySNSTopic: + Type: AWS::SNS::Topic + `, + expected: sns.SNS{ + Topics: []sns.Topic{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/sqs/queue.go b/pkg/iac/adapters/cloudformation/aws/sqs/queue.go index 2670dc299663..555fd54efd90 100644 --- a/pkg/iac/adapters/cloudformation/aws/sqs/queue.go +++ b/pkg/iac/adapters/cloudformation/aws/sqs/queue.go @@ -21,7 +21,6 @@ func getQueues(ctx parser.FileContext) (queues []sqs.Queue) { ManagedEncryption: iacTypes.Bool(false, r.Metadata()), KMSKeyID: r.GetStringProperty("KmsMasterKeyId"), 
}, - Policies: []iam.Policy{}, } if policy, err := getPolicy(r.ID(), ctx); err == nil { queue.Policies = append(queue.Policies, *policy) diff --git a/pkg/iac/adapters/cloudformation/aws/sqs/sqs_test.go b/pkg/iac/adapters/cloudformation/aws/sqs/sqs_test.go new file mode 100644 index 000000000000..8abeff2aca3e --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/sqs/sqs_test.go @@ -0,0 +1,86 @@ +package sqs + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/iam" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/sqs" + "github.com/aquasecurity/trivy/pkg/iac/types" + "github.com/liamg/iamgo" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected sqs.SQS + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + MyQueue: + Type: AWS::SQS::Queue + Properties: + QueueName: "SampleQueue" + KmsMasterKeyId: mykey + SampleSQSPolicy: + Type: AWS::SQS::QueuePolicy + Properties: + Queues: + - !Ref MyQueue + PolicyDocument: + Statement: + - + Action: + - "SQS:SendMessage" + Effect: "Allow" + Resource: "arn:aws:sqs:us-east-2:444455556666:queue2" +`, + expected: sqs.SQS{ + Queues: []sqs.Queue{ + { + Encryption: sqs.Encryption{ + KMSKeyID: types.StringTest("mykey"), + }, + Policies: []iam.Policy{ + { + Document: func() iam.Document { + return iam.Document{ + Parsed: iamgo.NewPolicyBuilder(). + WithStatement( + iamgo.NewStatementBuilder(). + WithEffect("Allow"). + WithActions([]string{"SQS:SendMessage"}). + WithResources([]string{"arn:aws:sqs:us-east-2:444455556666:queue2"}). + Build(), + ). + Build(), + } + }(), + }, + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + MySNSTopic: + Type: AWS::SQS::Queue + `, + expected: sqs.SQS{ + Queues: []sqs.Queue{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/ssm/ssm_test.go b/pkg/iac/adapters/cloudformation/aws/ssm/ssm_test.go new file mode 100644 index 000000000000..9709207fec66 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/ssm/ssm_test.go @@ -0,0 +1,53 @@ +package ssm + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/ssm" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected ssm.SSM + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + MySecretA: + Type: 'AWS::SecretsManager::Secret' + Properties: + Name: MySecretForAppA + KmsKeyId: alias/exampleAlias +`, + expected: ssm.SSM{ + Secrets: []ssm.Secret{ + { + KMSKeyID: types.StringTest("alias/exampleAlias"), + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + MySecretA: + Type: 'AWS::SecretsManager::Secret' + `, + expected: ssm.SSM{ + Secrets: []ssm.Secret{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/workspaces/workspaces_test.go b/pkg/iac/adapters/cloudformation/aws/workspaces/workspaces_test.go new file mode 100644 index 000000000000..41e821e6466d --- 
/dev/null +++ b/pkg/iac/adapters/cloudformation/aws/workspaces/workspaces_test.go @@ -0,0 +1,62 @@ +package workspaces + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/workspaces" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected workspaces.WorkSpaces + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + MyWorkSpace: + Type: AWS::WorkSpaces::Workspace + Properties: + RootVolumeEncryptionEnabled: true + UserVolumeEncryptionEnabled: true +`, + expected: workspaces.WorkSpaces{ + WorkSpaces: []workspaces.WorkSpace{ + { + RootVolume: workspaces.Volume{ + Encryption: workspaces.Encryption{ + Enabled: types.BoolTest(true), + }, + }, + UserVolume: workspaces.Volume{ + Encryption: workspaces.Encryption{ + Enabled: types.BoolTest(true), + }, + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + MyWorkSpace: + Type: AWS::WorkSpaces::Workspace + `, + expected: workspaces.WorkSpaces{ + WorkSpaces: []workspaces.WorkSpace{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/testutil/testutil.go b/pkg/iac/adapters/cloudformation/testutil/testutil.go new file mode 100644 index 000000000000..f908519d4106 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/testutil/testutil.go @@ -0,0 +1,25 @@ +package testutil + +import ( + "context" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy/internal/testutil" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +type adaptFn[T any] func(fctx parser.FileContext) T + +func AdaptAndCompare[T any](t *testing.T, source string, expected any, fn adaptFn[T]) { + fsys := testutil.CreateFS(t, map[string]string{ + "main.yaml": source, + }) + + fctx, err := parser.New().ParseFile(context.TODO(), fsys, "main.yaml") + require.NoError(t, err) + + adapted := fn(*fctx) + testutil.AssertDefsecEqual(t, expected, adapted) +} diff --git a/pkg/iac/adapters/terraform/aws/s3/adapt_test.go b/pkg/iac/adapters/terraform/aws/s3/adapt_test.go index 1d347d3520fe..65394abd3ea7 100644 --- a/pkg/iac/adapters/terraform/aws/s3/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/s3/adapt_test.go @@ -36,7 +36,7 @@ resource "aws_s3_bucket_public_access_block" "example_access_block"{ hasPublicAccess: true, }, { - desc: "public access block is found when using the bucket name as the lookup", + desc: "public access block is found when using the bucket id as the lookup", source: ` resource "aws_s3_bucket" "example" { bucket = "bucketname" @@ -254,6 +254,33 @@ func Test_Adapt(t *testing.T) { }, }, }, + { + name: "non-valid SSE algorithm", + terraform: ` +resource "aws_s3_bucket" "this" { + bucket = "test" +} + +resource "aws_s3_bucket_server_side_encryption_configuration" "this" { + bucket = aws_s3_bucket.this.id + rule { + apply_server_side_encryption_by_default { + sse_algorithm = "" + } + } +}`, + expected: s3.S3{ + Buckets: []s3.Bucket{ + { + Name: iacTypes.String("test", iacTypes.NewTestMetadata()), + Encryption: s3.Encryption{ + Enabled: iacTypes.Bool(false, iacTypes.NewTestMetadata()), + }, + ACL: iacTypes.String("private", iacTypes.NewTestMetadata()), + }, + }, + }, + }, } for _, test := range 
tests { diff --git a/pkg/iac/adapters/terraform/aws/s3/bucket.go b/pkg/iac/adapters/terraform/aws/s3/bucket.go index ae5b2ddb2f4d..5ecf7e9ba21b 100644 --- a/pkg/iac/adapters/terraform/aws/s3/bucket.go +++ b/pkg/iac/adapters/terraform/aws/s3/bucket.go @@ -1,6 +1,10 @@ package s3 import ( + "slices" + + s3types "github.com/aws/aws-sdk-go-v2/service/s3/types" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/s3" "github.com/aquasecurity/trivy/pkg/iac/terraform" iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" @@ -194,11 +198,13 @@ func isEncrypted(sseConfgihuration *terraform.Block) iacTypes.BoolValue { sseConfgihuration, "rule.apply_server_side_encryption_by_default.sse_algorithm", func(attr *terraform.Attribute, parent *terraform.Block) iacTypes.BoolValue { - if attr.IsNil() { + if attr.IsNil() || !attr.IsString() { return iacTypes.BoolDefault(false, parent.GetMetadata()) } + algoVal := attr.Value().AsString() + isValidAlgo := slices.Contains(s3types.ServerSideEncryption("").Values(), s3types.ServerSideEncryption(algoVal)) return iacTypes.Bool( - true, + isValidAlgo, attr.GetMetadata(), ) }, diff --git a/pkg/iac/adapters/terraform/tftestutil/testutil.go b/pkg/iac/adapters/terraform/tftestutil/testutil.go index 5503bfac5b99..57535cf151c5 100644 --- a/pkg/iac/adapters/terraform/tftestutil/testutil.go +++ b/pkg/iac/adapters/terraform/tftestutil/testutil.go @@ -5,7 +5,7 @@ import ( "testing" "github.com/aquasecurity/trivy/internal/testutil" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" "github.com/aquasecurity/trivy/pkg/iac/terraform" ) @@ -13,7 +13,7 @@ func CreateModulesFromSource(t *testing.T, source, ext string) terraform.Modules fs := testutil.CreateFS(t, map[string]string{ "source" + ext: source, }) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) if err := p.ParseFS(context.TODO(), "."); err != nil { t.Fatal(err) } diff --git a/pkg/iac/ignore/parse.go b/pkg/iac/ignore/parse.go new file mode 100644 index 000000000000..075f1f621203 --- /dev/null +++ b/pkg/iac/ignore/parse.go @@ -0,0 +1,168 @@ +package ignore + +import ( + "errors" + "strings" + "time" + + "github.com/aquasecurity/trivy/pkg/iac/types" + "github.com/aquasecurity/trivy/pkg/log" +) + +// RuleSectionParser defines the interface for parsing ignore rules. 
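+// Key returns the section name (e.g. "ws" or "exp"), Parse consumes the raw
+// section value, and Param exposes the parsed parameter that is later handed
+// to the matching Ignorer.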
+type RuleSectionParser interface { + Key() string + Parse(string) bool + Param() any +} + +// Parse parses the configuration file and returns the Rules +func Parse(src, path string, parsers ...RuleSectionParser) Rules { + var rules Rules + for i, line := range strings.Split(src, "\n") { + line = strings.TrimSpace(line) + rng := types.NewRange(path, i+1, i+1, "", nil) + lineIgnores := parseLine(line, rng, parsers) + for _, lineIgnore := range lineIgnores { + rules = append(rules, lineIgnore) + } + } + + rules.shift() + + return rules +} + +func parseLine(line string, rng types.Range, parsers []RuleSectionParser) []Rule { + var rules []Rule + + sections := strings.Split(strings.TrimSpace(line), " ") + for _, section := range sections { + section := strings.TrimSpace(section) + section = strings.TrimLeftFunc(section, func(r rune) bool { + return r == '#' || r == '/' || r == '*' + }) + + section, exists := hasIgnoreRulePrefix(section) + if !exists { + continue + } + + rule, err := parseComment(section, rng, parsers) + if err != nil { + log.Logger.Debugf("Failed to parse rule at %s: %s", rng.String(), err.Error()) + continue + } + rules = append(rules, rule) + } + + return rules +} + +func hasIgnoreRulePrefix(s string) (string, bool) { + for _, prefix := range []string{"tfsec:", "trivy:"} { + if after, found := strings.CutPrefix(s, prefix); found { + return after, true + } + } + + return "", false +} + +func parseComment(input string, rng types.Range, parsers []RuleSectionParser) (Rule, error) { + rule := Rule{ + rng: rng, + sections: make(map[string]any), + } + + parsers = append(parsers, &expiryDateParser{ + rng: rng, + }) + + segments := strings.Split(input, ":") + + for i := 0; i < len(segments)-1; i += 2 { + key := segments[i] + val := segments[i+1] + if key == "ignore" { + // special case, because id and parameters are in the same section + idParser := &checkIDParser{ + StringMatchParser{SectionKey: "id"}, + } + if idParser.Parse(val) { + rule.sections[idParser.Key()] = idParser.Param() + } + } + + for _, parser := range parsers { + if parser.Key() != key { + continue + } + + if parser.Parse(val) { + rule.sections[parser.Key()] = parser.Param() + } + } + } + + if _, exists := rule.sections["id"]; !exists { + return Rule{}, errors.New("rule section with the `ignore` key is required") + } + + return rule, nil +} + +type StringMatchParser struct { + SectionKey string + param string +} + +func (s *StringMatchParser) Key() string { + return s.SectionKey +} + +func (s *StringMatchParser) Parse(str string) bool { + s.param = str + return str != "" +} + +func (s *StringMatchParser) Param() any { + return s.param +} + +type checkIDParser struct { + StringMatchParser +} + +func (s *checkIDParser) Parse(str string) bool { + if idx := strings.Index(str, "["); idx != -1 { + str = str[:idx] + } + return s.StringMatchParser.Parse(str) +} + +type expiryDateParser struct { + rng types.Range + expiry time.Time +} + +func (s *expiryDateParser) Key() string { + return "exp" +} + +func (s *expiryDateParser) Parse(str string) bool { + parsed, err := time.Parse("2006-01-02", str) + if err != nil { + log.Logger.Debugf("Incorrect time to ignore is specified: %s", str) + parsed = time.Time{} + } else if time.Now().After(parsed) { + log.Logger.Debug("Ignore rule time has expired for location: %s", s.rng.String()) + } + + s.expiry = parsed + return true +} + +func (s *expiryDateParser) Param() any { + return s.expiry +} diff --git a/pkg/iac/ignore/rule.go b/pkg/iac/ignore/rule.go new file mode 100644 index 
000000000000..61057ce75f87 --- /dev/null +++ b/pkg/iac/ignore/rule.go @@ -0,0 +1,131 @@ +package ignore + +import ( + "regexp" + "slices" + "strings" + "time" + + "github.com/samber/lo" + + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +// Ignorer represents a function that checks if the rule should be ignored. +type Ignorer func(resultMeta types.Metadata, ignoredParam any) bool + +type Rules []Rule + +// Ignore checks if the rule should be ignored based on provided metadata, IDs, and ignorer functions. +func (r Rules) Ignore(m types.Metadata, ids []string, ignorers map[string]Ignorer) bool { + return slices.ContainsFunc(r, func(r Rule) bool { + return r.ignore(m, ids, ignorers) + }) +} + +func (r Rules) shift() { + var ( + currentRange *types.Range + offset int + ) + + for i := len(r) - 1; i > 0; i-- { + currentIgnore, nextIgnore := r[i], r[i-1] + if currentRange == nil { + currentRange = ¤tIgnore.rng + } + if nextIgnore.rng.GetStartLine()+1+offset == currentIgnore.rng.GetStartLine() { + r[i-1].rng = *currentRange + offset++ + } else { + currentRange = nil + offset = 0 + } + } +} + +// Rule represents a rule for ignoring vulnerabilities. +type Rule struct { + rng types.Range + sections map[string]any +} + +func (r Rule) ignore(m types.Metadata, ids []string, ignorers map[string]Ignorer) bool { + matchMeta, ok := r.matchRange(&m) + if !ok { + return false + } + + ignorers = lo.Assign(defaultIgnorers(ids), ignorers) + + for ignoreID, ignore := range ignorers { + if param, exists := r.sections[ignoreID]; exists { + if !ignore(*matchMeta, param) { + return false + } + } + } + + return true +} + +func (r Rule) matchRange(m *types.Metadata) (*types.Metadata, bool) { + metaHierarchy := m + for metaHierarchy != nil { + if r.rng.GetFilename() != metaHierarchy.Range().GetFilename() { + metaHierarchy = metaHierarchy.Parent() + continue + } + if metaHierarchy.Range().GetStartLine() == r.rng.GetStartLine()+1 || + metaHierarchy.Range().GetStartLine() == r.rng.GetStartLine() { + return metaHierarchy, true + } + metaHierarchy = metaHierarchy.Parent() + } + + return nil, false +} + +func defaultIgnorers(ids []string) map[string]Ignorer { + return map[string]Ignorer{ + "id": func(_ types.Metadata, param any) bool { + id, ok := param.(string) + if !ok { + return false + } + if id == "*" || len(ids) == 0 { + return true + } + + return slices.ContainsFunc(ids, func(s string) bool { + return MatchPattern(s, id) + }) + }, + "exp": func(_ types.Metadata, param any) bool { + expiry, ok := param.(time.Time) + return ok && time.Now().Before(expiry) + }, + } +} + +// MatchPattern checks if the pattern string matches the input pattern. +// The wildcard '*' in the pattern matches any sequence of characters. 
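+// For example, MatchPattern("example-test", "*-test*") returns true, while
+// MatchPattern("example", "test") returns false.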
+func MatchPattern(input, pattern string) bool { + matched, err := regexp.MatchString(regexpFromPattern(pattern), input) + return err == nil && matched +} + +func regexpFromPattern(pattern string) string { + parts := strings.Split(pattern, "*") + if len(parts) == 1 { + return "^" + pattern + "$" + } + var sb strings.Builder + for i, literal := range parts { + if i > 0 { + sb.WriteString(".*") + } + sb.WriteString(regexp.QuoteMeta(literal)) + } + return "^" + sb.String() + "$" +} diff --git a/pkg/iac/ignore/rule_test.go b/pkg/iac/ignore/rule_test.go new file mode 100644 index 000000000000..da89ca2a3595 --- /dev/null +++ b/pkg/iac/ignore/rule_test.go @@ -0,0 +1,314 @@ +package ignore_test + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/ignore" + "github.com/aquasecurity/trivy/pkg/iac/types" + "github.com/stretchr/testify/assert" +) + +func metadataWithLine(path string, line int) types.Metadata { + return types.NewMetadata(types.NewRange(path, line, line, "", nil), "") +} + +func TestRules_Ignore(t *testing.T) { + + const filename = "test" + + type args struct { + metadata types.Metadata + ids []string + } + + tests := []struct { + name string + src string + args args + shouldIgnore bool + }{ + { + name: "no ignore", + src: `#test`, + shouldIgnore: false, + }, + { + name: "one ignore rule", + src: `#trivy:ignore:rule-1`, + args: args{ + metadata: metadataWithLine(filename, 2), + ids: []string{"rule-1"}, + }, + shouldIgnore: true, + }, + { + name: "blank line between rule and finding", + src: `#trivy:ignore:rule-1`, + args: args{ + metadata: metadataWithLine(filename, 3), + ids: []string{"rule-1"}, + }, + shouldIgnore: false, + }, + { + name: "blank line between rules", + src: `#trivy:ignore:rule-1 + +#trivy:ignore:rule-2 +`, + args: args{ + metadata: metadataWithLine(filename, 4), + ids: []string{"rule-1"}, + }, + shouldIgnore: false, + }, + { + name: "rule and a finding on the same line", + src: `#trivy:ignore:rule-1`, + args: args{ + metadata: metadataWithLine(filename, 1), + ids: []string{"rule-1"}, + }, + shouldIgnore: true, + }, + { + name: "rule and a finding on the same line", + src: `test #trivy:ignore:rule-1`, + args: args{ + metadata: metadataWithLine(filename, 1), + ids: []string{"rule-1"}, + }, + shouldIgnore: true, + }, + { + name: "multiple rules on one line", + src: `test #trivy:ignore:rule-1 #trivy:ignore:rule-2`, + args: args{ + metadata: metadataWithLine(filename, 1), + ids: []string{"rule-2"}, + }, + shouldIgnore: true, + }, + { + name: "rule and find from different files", + src: `test #trivy:ignore:rule-1`, + args: args{ + metadata: metadataWithLine("another-file", 1), + ids: []string{"rule-2"}, + }, + shouldIgnore: false, + }, + { + name: "multiple ignore rule", + src: `#trivy:ignore:rule-1 +#trivy:ignore:rule-2 +`, + args: args{ + metadata: metadataWithLine(filename, 3), + ids: []string{"rule-1"}, + }, + shouldIgnore: true, + }, + { + name: "ignore section with params", + src: `#trivy:ignore:rule-1[param1=1]`, + args: args{ + metadata: metadataWithLine(filename, 2), + ids: []string{"rule-1"}, + }, + shouldIgnore: true, + }, + { + name: "id's don't match", + src: `#trivy:ignore:rule-1`, + args: args{ + metadata: metadataWithLine(filename, 2), + ids: []string{"rule-2"}, + }, + shouldIgnore: false, + }, + { + name: "without ignore section", + src: `#trivy:exp:2022-01-01`, + args: args{ + metadata: metadataWithLine(filename, 2), + ids: []string{"rule-2"}, + }, + shouldIgnore: false, + }, + { + name: "non valid ignore section", + src: `#trivy:ignore`, + 
args: args{ + metadata: metadataWithLine(filename, 2), + ids: []string{"rule-2"}, + }, + shouldIgnore: false, + }, + { + name: "ignore rule with expiry date passed", + src: `#trivy:ignore:rule-1:exp:2022-01-01`, + args: args{ + metadata: metadataWithLine(filename, 2), + ids: []string{"rule-1"}, + }, + shouldIgnore: false, + }, + { + name: "ignore rule with expiry date not passed", + src: `#trivy:ignore:rule-1:exp:2026-01-01`, + args: args{ + metadata: metadataWithLine(filename, 2), + ids: []string{"rule-1"}, + }, + shouldIgnore: true, + }, + { + name: "ignore rule with invalid expiry date", + src: `#trivy:ignore:rule-1:exp:2026-99-01`, + args: args{ + metadata: metadataWithLine(filename, 2), + ids: []string{"rule-1"}, + }, + shouldIgnore: false, + }, + { + name: "with valid wildcard", + src: `#trivy:ignore:rule-*`, + args: args{ + metadata: metadataWithLine(filename, 2), + ids: []string{"rule-1"}, + }, + shouldIgnore: true, + }, + { + name: "with non-valid wildcard", + src: `#trivy:ignore:rule-1-*d`, + args: args{ + metadata: metadataWithLine(filename, 2), + ids: []string{"rule-1-abc"}, + }, + shouldIgnore: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + rules := ignore.Parse(tt.src, filename) + got := rules.Ignore(tt.args.metadata, tt.args.ids, nil) + assert.Equal(t, tt.shouldIgnore, got) + }) + } +} + +func TestRules_IgnoreWithCustomIgnorer(t *testing.T) { + const filename = "test" + + type args struct { + metadata types.Metadata + ids []string + ignorers map[string]ignore.Ignorer + } + + tests := []struct { + name string + src string + parser ignore.RuleSectionParser + args args + shouldIgnore bool + }{ + { + name: "happy", + src: `#trivy:ignore:rule-1:ws:dev`, + parser: &ignore.StringMatchParser{ + SectionKey: "ws", + }, + args: args{ + metadata: metadataWithLine(filename, 2), + ids: []string{"rule-1"}, + ignorers: map[string]ignore.Ignorer{ + "ws": func(_ types.Metadata, param any) bool { + ws, ok := param.(string) + if !ok { + return false + } + return ws == "dev" + }, + }, + }, + shouldIgnore: true, + }, + { + name: "with wildcard", + src: `#trivy:ignore:rule-1:ws:dev-*`, + parser: &ignore.StringMatchParser{ + SectionKey: "ws", + }, + args: args{ + metadata: metadataWithLine(filename, 2), + ids: []string{"rule-1"}, + ignorers: map[string]ignore.Ignorer{ + "ws": func(_ types.Metadata, param any) bool { + ws, ok := param.(string) + if !ok { + return false + } + return ignore.MatchPattern("dev-stage1", ws) + }, + }, + }, + shouldIgnore: true, + }, + { + name: "bad", + src: `#trivy:ignore:rule-1:ws:prod`, + parser: &ignore.StringMatchParser{ + SectionKey: "ws", + }, + args: args{ + metadata: metadataWithLine(filename, 2), + ids: []string{"rule-1"}, + ignorers: map[string]ignore.Ignorer{ + "ws": func(_ types.Metadata, param any) bool { + ws, ok := param.(string) + if !ok { + return false + } + return ws == "dev" + }, + }, + }, + shouldIgnore: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + rules := ignore.Parse(tt.src, filename, tt.parser) + got := rules.Ignore(tt.args.metadata, tt.args.ids, tt.args.ignorers) + assert.Equal(t, tt.shouldIgnore, got) + }) + } +} + +func TestMatchPattern(t *testing.T) { + tests := []struct { + input string + pattern string + expected bool + }{ + {"foo-test-bar", "*-test-*", true}, + {"foo-test-bar", "*-example-*", false}, + {"test", "*test", true}, + {"example", "test", false}, + {"example-test", "*-test*", true}, + {"example-test", "*example-*", true}, + } + + for _, tc := range 
tests { + t.Run(tc.input+":"+tc.pattern, func(t *testing.T) { + got := ignore.MatchPattern(tc.input, tc.pattern) + assert.Equal(t, tc.expected, got) + }) + } +} diff --git a/pkg/iac/providers/aws/ecs/ecs.go b/pkg/iac/providers/aws/ecs/ecs.go index 181c4a2ac90a..b0728c2bbf7f 100755 --- a/pkg/iac/providers/aws/ecs/ecs.go +++ b/pkg/iac/providers/aws/ecs/ecs.go @@ -87,9 +87,11 @@ func (j containerDefinitionJSON) convert(metadata iacTypes.Metadata) ContainerDe } type ContainerDefinition struct { - Metadata iacTypes.Metadata - Name iacTypes.StringValue - Image iacTypes.StringValue + Metadata iacTypes.Metadata + Name iacTypes.StringValue + Image iacTypes.StringValue + // TODO: CPU and Memory are strings + // https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecs-taskdefinition.html#cfn-ecs-taskdefinition-cpu CPU iacTypes.IntValue Memory iacTypes.IntValue Essential iacTypes.BoolValue diff --git a/pkg/iac/scan/code_test.go b/pkg/iac/scan/code_test.go index e0591ed23c85..c3ffe3725ef1 100644 --- a/pkg/iac/scan/code_test.go +++ b/pkg/iac/scan/code_test.go @@ -5,13 +5,11 @@ import ( "strings" "testing" - iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" - + "github.com/liamg/memoryfs" "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/liamg/memoryfs" + iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" ) func TestResult_GetCode(t *testing.T) { diff --git a/pkg/iac/scan/result.go b/pkg/iac/scan/result.go index 861171e2dcc0..d9924c6aaeef 100644 --- a/pkg/iac/scan/result.go +++ b/pkg/iac/scan/result.go @@ -7,6 +7,7 @@ import ( "reflect" "strings" + "github.com/aquasecurity/trivy/pkg/iac/ignore" "github.com/aquasecurity/trivy/pkg/iac/severity" iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" ) @@ -261,6 +262,22 @@ func (r *Results) AddIgnored(source interface{}, descriptions ...string) { *r = append(*r, res) } +func (r *Results) Ignore(ignoreRules ignore.Rules, ignores map[string]ignore.Ignorer) { + for i, result := range *r { + allIDs := []string{ + result.Rule().LongID(), + result.Rule().AVDID, + strings.ToLower(result.Rule().AVDID), + result.Rule().ShortCode, + } + allIDs = append(allIDs, result.Rule().Aliases...) 
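+		// allIDs now holds every identifier the rule is known by (long ID, AVD ID
+		// in both upper and lower case, short code and aliases), so an ignore rule
+		// can match on any of them.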
+ + if ignoreRules.Ignore(result.Metadata(), allIDs, ignores) { + (*r)[i].OverrideStatus(StatusIgnored) + } + } +} + func (r *Results) SetRule(rule Rule) { for i := range *r { (*r)[i].rule = rule diff --git a/pkg/iac/scanners/cloudformation/cftypes/types.go b/pkg/iac/scanners/cloudformation/cftypes/types.go index 44d9c1fd2a93..0dc3b8b586a2 100644 --- a/pkg/iac/scanners/cloudformation/cftypes/types.go +++ b/pkg/iac/scanners/cloudformation/cftypes/types.go @@ -1,5 +1,7 @@ package cftypes +import "reflect" + type CfType string const ( @@ -9,4 +11,26 @@ const ( Bool CfType = "bool" Map CfType = "map" List CfType = "list" + Unknown CfType = "unknown" ) + +func TypeFromGoValue(value interface{}) CfType { + switch reflect.TypeOf(value).Kind() { + case reflect.String: + return String + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return Int + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + return Int + case reflect.Float32, reflect.Float64: + return Float64 + case reflect.Bool: + return Bool + case reflect.Map: + return Map + case reflect.Slice: + return List + default: + return Unknown + } +} diff --git a/pkg/iac/scanners/cloudformation/parser/file_context.go b/pkg/iac/scanners/cloudformation/parser/file_context.go index 4904d13f29d0..746dae7e024b 100644 --- a/pkg/iac/scanners/cloudformation/parser/file_context.go +++ b/pkg/iac/scanners/cloudformation/parser/file_context.go @@ -1,6 +1,7 @@ package parser import ( + "github.com/aquasecurity/trivy/pkg/iac/ignore" iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" ) @@ -17,6 +18,7 @@ type FileContext struct { filepath string lines []string SourceFormat SourceFormat + Ignores ignore.Rules Parameters map[string]*Parameter `json:"Parameters" yaml:"Parameters"` Resources map[string]*Resource `json:"Resources" yaml:"Resources"` Globals map[string]*Resource `json:"Globals" yaml:"Globals"` diff --git a/pkg/iac/scanners/cloudformation/parser/fn_find_in_map_test.go b/pkg/iac/scanners/cloudformation/parser/fn_find_in_map_test.go index bbfa372b7121..6063c39fc006 100644 --- a/pkg/iac/scanners/cloudformation/parser/fn_find_in_map_test.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_find_in_map_test.go @@ -98,3 +98,26 @@ Resources: nodeTypeProp := testRes.GetStringProperty("CacheNodeType", "") assert.Equal(t, "cache.t2.micro", nodeTypeProp.Value()) } + +func Test_InferType(t *testing.T) { + source := `--- +Mappings: + ApiDB: + MultiAZ: + development: False +Resources: + ApiDB: + Type: AWS::RDS::DBInstance + Properties: + MultiAZ: !FindInMap [ApiDB, MultiAZ, development] +` + + ctx := createTestFileContext(t, source) + require.NotNil(t, ctx) + + testRes := ctx.GetResourceByLogicalID("ApiDB") + require.NotNil(t, testRes) + + nodeTypeProp := testRes.GetBoolProperty("MultiAZ") + assert.False(t, nodeTypeProp.Value()) +} diff --git a/pkg/iac/scanners/cloudformation/parser/intrinsics.go b/pkg/iac/scanners/cloudformation/parser/intrinsics.go index d455fd3d5c6e..1dadc4f6d6fd 100644 --- a/pkg/iac/scanners/cloudformation/parser/intrinsics.go +++ b/pkg/iac/scanners/cloudformation/parser/intrinsics.go @@ -78,8 +78,13 @@ func ResolveIntrinsicFunc(property *Property) (*Property, bool) { for funcName := range property.AsMap() { if fn := intrinsicFuncs[funcName]; fn != nil { - // - return fn(property) + prop, resolved := fn(property) + if prop == nil || !resolved { + return prop, false + } + + prop.inferType() + return prop, true } } return property, false diff --git 
a/pkg/iac/scanners/cloudformation/parser/parser.go b/pkg/iac/scanners/cloudformation/parser/parser.go index 43e4099289c5..65bf1440432d 100644 --- a/pkg/iac/scanners/cloudformation/parser/parser.go +++ b/pkg/iac/scanners/cloudformation/parser/parser.go @@ -16,6 +16,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/debug" "github.com/aquasecurity/trivy/pkg/iac/detection" + "github.com/aquasecurity/trivy/pkg/iac/ignore" "github.com/aquasecurity/trivy/pkg/iac/scanners/options" ) @@ -165,12 +166,14 @@ func (p *Parser) ParseFile(ctx context.Context, fsys fs.FS, path string) (fctx * SourceFormat: sourceFmt, } - if strings.HasSuffix(strings.ToLower(path), ".json") { - if err := jfather.Unmarshal(content, fctx); err != nil { + switch sourceFmt { + case YamlSourceFormat: + if err := yaml.Unmarshal(content, fctx); err != nil { return nil, NewErrInvalidContent(path, err) } - } else { - if err := yaml.Unmarshal(content, fctx); err != nil { + fctx.Ignores = ignore.Parse(string(content), path) + case JsonSourceFormat: + if err := jfather.Unmarshal(content, fctx); err != nil { return nil, NewErrInvalidContent(path, err) } } diff --git a/pkg/iac/scanners/cloudformation/parser/property.go b/pkg/iac/scanners/cloudformation/parser/property.go index 3cdbbb36b58a..ae0c57050a23 100644 --- a/pkg/iac/scanners/cloudformation/parser/property.go +++ b/pkg/iac/scanners/cloudformation/parser/property.go @@ -113,19 +113,8 @@ func (p *Property) Range() iacTypes.Range { } func (p *Property) Metadata() iacTypes.Metadata { - base := p - if p.isFunction() { - if resolved, ok := p.resolveValue(); ok { - base = resolved - } - } - ref := NewCFReferenceWithValue(p.parentRange, *base, p.logicalId) - return iacTypes.NewMetadata(p.Range(), ref.String()) -} - -func (p *Property) MetadataWithValue(resolvedValue *Property) iacTypes.Metadata { - ref := NewCFReferenceWithValue(p.parentRange, *resolvedValue, p.logicalId) - return iacTypes.NewMetadata(p.Range(), ref.String()) + return iacTypes.NewMetadata(p.Range(), p.name). 
+ WithParent(iacTypes.NewMetadata(p.parentRange, p.logicalId)) } func (p *Property) isFunction() bool { @@ -425,3 +414,11 @@ func convert(input interface{}) interface{} { } return input } + +func (p *Property) inferType() { + typ := cftypes.TypeFromGoValue(p.Inner.Value) + if typ == cftypes.Unknown { + return + } + p.Inner.Type = typ +} diff --git a/pkg/iac/scanners/cloudformation/parser/reference.go b/pkg/iac/scanners/cloudformation/parser/reference.go index 59cbf583c8cf..705eef2747af 100644 --- a/pkg/iac/scanners/cloudformation/parser/reference.go +++ b/pkg/iac/scanners/cloudformation/parser/reference.go @@ -1,15 +1,12 @@ package parser import ( - "fmt" - iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" ) type CFReference struct { logicalId string resourceRange iacTypes.Range - resolvedValue Property } func NewCFReference(id string, resourceRange iacTypes.Range) CFReference { @@ -19,40 +16,6 @@ func NewCFReference(id string, resourceRange iacTypes.Range) CFReference { } } -func NewCFReferenceWithValue(resourceRange iacTypes.Range, resolvedValue Property, logicalId string) CFReference { - return CFReference{ - resourceRange: resourceRange, - resolvedValue: resolvedValue, - logicalId: logicalId, - } -} - func (cf CFReference) String() string { return cf.resourceRange.String() } - -func (cf CFReference) LogicalID() string { - return cf.logicalId -} - -func (cf CFReference) ResourceRange() iacTypes.Range { - return cf.resourceRange -} - -func (cf CFReference) PropertyRange() iacTypes.Range { - if cf.resolvedValue.IsNotNil() { - return cf.resolvedValue.Range() - } - return iacTypes.Range{} -} - -func (cf CFReference) DisplayValue() string { - if cf.resolvedValue.IsNotNil() { - return fmt.Sprintf("%v", cf.resolvedValue.RawValue()) - } - return "" -} - -func (cf *CFReference) Comment() string { - return cf.resolvedValue.Comment() -} diff --git a/pkg/iac/scanners/cloudformation/parser/resource.go b/pkg/iac/scanners/cloudformation/parser/resource.go index 69a864ad7cdf..bd1351f234df 100644 --- a/pkg/iac/scanners/cloudformation/parser/resource.go +++ b/pkg/iac/scanners/cloudformation/parser/resource.go @@ -100,11 +100,8 @@ func (r *Resource) GetProperty(path string) *Property { first := pathParts[0] property := &Property{} - for n, p := range r.properties() { - if n == first { - property = p - break - } + if p, exists := r.properties()[first]; exists { + property = p } if len(pathParts) == 1 || property.IsNil() { diff --git a/pkg/iac/scanners/cloudformation/parser/util.go b/pkg/iac/scanners/cloudformation/parser/util.go index a0792cf32865..03b9bf8da837 100644 --- a/pkg/iac/scanners/cloudformation/parser/util.go +++ b/pkg/iac/scanners/cloudformation/parser/util.go @@ -66,13 +66,15 @@ func setPropertyValueFromYaml(node *yaml.Node, propertyData *PropertyInner) erro if node.Content == nil { switch node.Tag { - case "!!int": propertyData.Type = cftypes.Int propertyData.Value, _ = strconv.Atoi(node.Value) case "!!bool": propertyData.Type = cftypes.Bool propertyData.Value, _ = strconv.ParseBool(node.Value) + case "!!float": + propertyData.Type = cftypes.Float64 + propertyData.Value, _ = strconv.ParseFloat(node.Value, 64) case "!!str", "!!string": propertyData.Type = cftypes.String propertyData.Value = node.Value diff --git a/pkg/iac/scanners/cloudformation/scanner.go b/pkg/iac/scanners/cloudformation/scanner.go index 4c0cbbc4216d..0920f4425fdb 100644 --- a/pkg/iac/scanners/cloudformation/scanner.go +++ b/pkg/iac/scanners/cloudformation/scanner.go @@ -15,7 +15,7 @@ import ( 
"github.com/aquasecurity/trivy/pkg/iac/rules" "github.com/aquasecurity/trivy/pkg/iac/scan" "github.com/aquasecurity/trivy/pkg/iac/scanners" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/scanners/options" "github.com/aquasecurity/trivy/pkg/iac/types" ) @@ -23,7 +23,7 @@ import ( func WithParameters(params map[string]any) options.ScannerOption { return func(cs options.ConfigurableScanner) { if s, ok := cs.(*Scanner); ok { - s.addParserOptions(parser2.WithParameters(params)) + s.addParserOptions(parser.WithParameters(params)) } } } @@ -31,7 +31,7 @@ func WithParameters(params map[string]any) options.ScannerOption { func WithParameterFiles(files ...string) options.ScannerOption { return func(cs options.ConfigurableScanner) { if s, ok := cs.(*Scanner); ok { - s.addParserOptions(parser2.WithParameterFiles(files...)) + s.addParserOptions(parser.WithParameterFiles(files...)) } } } @@ -39,7 +39,7 @@ func WithParameterFiles(files ...string) options.ScannerOption { func WithConfigsFS(fsys fs.FS) options.ScannerOption { return func(cs options.ConfigurableScanner) { if s, ok := cs.(*Scanner); ok { - s.addParserOptions(parser2.WithConfigsFS(fsys)) + s.addParserOptions(parser.WithConfigsFS(fsys)) } } } @@ -51,7 +51,7 @@ type Scanner struct { // nolint: gocritic debug debug.Logger policyDirs []string policyReaders []io.Reader - parser *parser2.Parser + parser *parser.Parser regoScanner *rego.Scanner skipRequired bool regoOnly bool @@ -131,7 +131,7 @@ func New(opts ...options.ScannerOption) *Scanner { opt(s) } s.addParserOptions(options.ParserWithSkipRequiredCheck(s.skipRequired)) - s.parser = parser2.New(s.parserOptions...) + s.parser = parser.New(s.parserOptions...) return s } @@ -206,7 +206,7 @@ func (s *Scanner) ScanFile(ctx context.Context, fsys fs.FS, path string) (scan.R return results, nil } -func (s *Scanner) scanFileContext(ctx context.Context, regoScanner *rego.Scanner, cfCtx *parser2.FileContext, fsys fs.FS) (results scan.Results, err error) { +func (s *Scanner) scanFileContext(ctx context.Context, regoScanner *rego.Scanner, cfCtx *parser.FileContext, fsys fs.FS) (results scan.Results, err error) { state := adapter.Adapt(*cfCtx) if state == nil { return nil, nil @@ -247,7 +247,15 @@ func (s *Scanner) scanFileContext(ctx context.Context, regoScanner *rego.Scanner if err != nil { return nil, fmt.Errorf("rego scan error: %w", err) } - return append(results, regoResults...), nil + results = append(results, regoResults...) 
+ + results.Ignore(cfCtx.Ignores, nil) + + for _, ignored := range results.GetIgnored() { + s.debug.Log("Ignored '%s' at '%s'.", ignored.Rule().LongID(), ignored.Range()) + } + + return results, nil } func getDescription(scanResult scan.Result, ref string) string { diff --git a/pkg/iac/scanners/cloudformation/scanner_test.go b/pkg/iac/scanners/cloudformation/scanner_test.go index 6aea88abc1af..3264609557ac 100644 --- a/pkg/iac/scanners/cloudformation/scanner_test.go +++ b/pkg/iac/scanners/cloudformation/scanner_test.go @@ -2,6 +2,7 @@ package cloudformation import ( "context" + "strings" "testing" "github.com/aquasecurity/trivy/internal/testutil" @@ -101,3 +102,130 @@ deny[res] { }, }, actualCode.Lines) } + +const bucketNameCheck = `# METADATA +# title: "test rego" +# scope: package +# schemas: +# - input: schema["cloud"] +# custom: +# id: AVD-AWS-001 +# avd_id: AVD-AWS-001 +# provider: aws +# service: s3 +# severity: LOW +# input: +# selector: +# - type: cloud +# subtypes: +# - service: s3 +# provider: aws +package user.aws.aws001 + +deny[res] { + bucket := input.aws.s3.buckets[_] + bucket.name.value == "test-bucket" + res := result.new("Denied", bucket.name) +} + +deny[res] { + bucket := input.aws.s3.buckets[_] + algo := bucket.encryption.algorithm + algo.value == "AES256" + res := result.new("Denied", algo) +} +` + +func TestIgnore(t *testing.T) { + tests := []struct { + name string + src string + ignored int + }{ + { + name: "without ignore", + src: `--- +Resources: + S3Bucket: + Type: 'AWS::S3::Bucket' + Properties: + BucketName: test-bucket +`, + ignored: 0, + }, + { + name: "rule before resource", + src: `--- +Resources: +#trivy:ignore:AVD-AWS-001 + S3Bucket: + Type: 'AWS::S3::Bucket' + Properties: + BucketName: test-bucket +`, + ignored: 1, + }, + { + name: "rule before property", + src: `--- +Resources: + S3Bucket: + Type: 'AWS::S3::Bucket' + Properties: +#trivy:ignore:AVD-AWS-001 + BucketName: test-bucket +`, + ignored: 1, + }, + { + name: "rule on the same line with the property", + src: `--- +Resources: + S3Bucket: + Type: 'AWS::S3::Bucket' + Properties: + BucketName: test-bucket #trivy:ignore:AVD-AWS-001 +`, + ignored: 1, + }, + { + name: "rule on the same line with the nested property", + src: `--- +Resources: + S3Bucket: + Type: 'AWS::S3::Bucket' + Properties: + BucketName: test-bucket + BucketEncryption: + ServerSideEncryptionConfiguration: + - ServerSideEncryptionByDefault: + SSEAlgorithm: AES256 #trivy:ignore:AVD-AWS-001 +`, + ignored: 1, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + fsys := testutil.CreateFS(t, map[string]string{ + "/code/main.yaml": tt.src, + }) + + scanner := New( + options.ScannerWithRegoOnly(true), + options.ScannerWithEmbeddedPolicies(false), + options.ScannerWithPolicyReader(strings.NewReader(bucketNameCheck)), + options.ScannerWithPolicyNamespaces("user"), + ) + + results, err := scanner.ScanFS(context.TODO(), fsys, "code") + require.NoError(t, err) + + if tt.ignored == 0 { + require.Len(t, results.GetFailed(), 1) + } else { + assert.Len(t, results.GetIgnored(), tt.ignored) + } + }) + } +} diff --git a/pkg/iac/scanners/helm/parser/parser.go b/pkg/iac/scanners/helm/parser/parser.go index 7ae69e72eb95..c8bc8a73bedd 100644 --- a/pkg/iac/scanners/helm/parser/parser.go +++ b/pkg/iac/scanners/helm/parser/parser.go @@ -7,7 +7,6 @@ import ( "fmt" "io" "io/fs" - "os" "path/filepath" "regexp" "sort" @@ -207,17 +206,7 @@ func (p *Parser) extractChartName(chartPath string) error { } func (p *Parser) RenderedChartFiles() 
([]ChartFile, error) { - - tempDir, err := os.MkdirTemp(os.TempDir(), "defsec") - if err != nil { - return nil, err - } - - if err := p.writeBuildFiles(tempDir); err != nil { - return nil, err - } - - workingChart, err := loadChart(tempDir) + workingChart, err := p.loadChart() if err != nil { return nil, err } @@ -261,19 +250,36 @@ func (p *Parser) getRelease(chrt *chart.Chart) (*release.Release, error) { return r, nil } -func loadChart(tempFs string) (*chart.Chart, error) { - loadedChart, err := loader.Load(tempFs) +func (p *Parser) loadChart() (*chart.Chart, error) { + + var files []*loader.BufferedFile + + for _, filePath := range p.filepaths { + b, err := fs.ReadFile(p.workingFS, filePath) + if err != nil { + return nil, err + } + + filePath = strings.TrimPrefix(filePath, p.rootPath+"/") + filePath = filepath.ToSlash(filePath) + files = append(files, &loader.BufferedFile{ + Name: filePath, + Data: b, + }) + } + + c, err := loader.LoadFiles(files) if err != nil { return nil, err } - if req := loadedChart.Metadata.Dependencies; req != nil { - if err := action.CheckDependencies(loadedChart, req); err != nil { + if req := c.Metadata.Dependencies; req != nil { + if err := action.CheckDependencies(c, req); err != nil { return nil, err } } - return loadedChart, nil + return c, nil } func (*Parser) getRenderedManifests(manifestsKeys []string, splitManifests map[string]string) []ChartFile { @@ -305,24 +311,6 @@ func getManifestPath(manifest string) string { return manifestFilePathParts[0] } -func (p *Parser) writeBuildFiles(tempFs string) error { - for _, path := range p.filepaths { - content, err := fs.ReadFile(p.workingFS, path) - if err != nil { - return err - } - workingPath := strings.TrimPrefix(path, p.rootPath) - workingPath = filepath.Join(tempFs, workingPath) - if err := os.MkdirAll(filepath.Dir(workingPath), os.ModePerm); err != nil { - return err - } - if err := os.WriteFile(workingPath, content, os.ModePerm); err != nil { - return err - } - } - return nil -} - func (p *Parser) required(path string, workingFS fs.FS) bool { if p.skipRequired { return true diff --git a/pkg/iac/scanners/helm/scanner.go b/pkg/iac/scanners/helm/scanner.go index b5e9686b3071..e2b666082c97 100644 --- a/pkg/iac/scanners/helm/scanner.go +++ b/pkg/iac/scanners/helm/scanner.go @@ -7,6 +7,7 @@ import ( "io/fs" "path/filepath" "strings" + "sync" "github.com/liamg/memoryfs" @@ -38,6 +39,8 @@ type Scanner struct { skipRequired bool frameworks []framework.Framework spec string + regoScanner *rego.Scanner + mu sync.Mutex } func (s *Scanner) SetSpec(spec string) { @@ -120,6 +123,10 @@ func (s *Scanner) SetRegoErrorLimit(_ int) {} func (s *Scanner) ScanFS(ctx context.Context, target fs.FS, path string) (scan.Results, error) { + if err := s.initRegoScanner(target); err != nil { + return nil, fmt.Errorf("failed to init rego scanner: %w", err) + } + var results []scan.Result if err := fs.WalkDir(target, path, func(path string, d fs.DirEntry, err error) error { select { @@ -150,6 +157,7 @@ func (s *Scanner) ScanFS(ctx context.Context, target fs.FS, path string) (scan.R } else { results = append(results, scanResults...) } + return fs.SkipDir } return nil @@ -177,14 +185,6 @@ func (s *Scanner) getScanResults(path string, ctx context.Context, target fs.FS) return nil, nil } - regoScanner := rego.NewScanner(types.SourceKubernetes, s.options...) 
- policyFS := target - if s.policyFS != nil { - policyFS = s.policyFS - } - if err := regoScanner.LoadPolicies(s.loadEmbeddedLibraries, s.loadEmbeddedPolicies, policyFS, s.policyDirs, s.policyReaders); err != nil { - return nil, fmt.Errorf("policies load: %w", err) - } for _, file := range chartFiles { file := file s.debug.Log("Processing rendered chart file: %s", file.TemplateFilePath) @@ -194,7 +194,7 @@ func (s *Scanner) getScanResults(path string, ctx context.Context, target fs.FS) return nil, fmt.Errorf("unmarshal yaml: %w", err) } for _, manifest := range manifests { - fileResults, err := regoScanner.ScanInput(ctx, rego.Input{ + fileResults, err := s.regoScanner.ScanInput(ctx, rego.Input{ Path: file.TemplateFilePath, Contents: manifest, FS: target, @@ -222,3 +222,18 @@ func (s *Scanner) getScanResults(path string, ctx context.Context, target fs.FS) } return results, nil } + +func (s *Scanner) initRegoScanner(srcFS fs.FS) error { + s.mu.Lock() + defer s.mu.Unlock() + if s.regoScanner != nil { + return nil + } + regoScanner := rego.NewScanner(types.SourceKubernetes, s.options...) + regoScanner.SetParentDebugLogger(s.debug) + if err := regoScanner.LoadPolicies(s.loadEmbeddedLibraries, s.loadEmbeddedPolicies, srcFS, s.policyDirs, s.policyReaders); err != nil { + return err + } + s.regoScanner = regoScanner + return nil +} diff --git a/pkg/iac/scanners/helm/test/option_test.go b/pkg/iac/scanners/helm/test/option_test.go index bca7619097c8..3c03cdb67d23 100644 --- a/pkg/iac/scanners/helm/test/option_test.go +++ b/pkg/iac/scanners/helm/test/option_test.go @@ -7,10 +7,10 @@ import ( "strings" "testing" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/helm/parser" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/aquasecurity/trivy/pkg/iac/scanners/helm/parser" "github.com/aquasecurity/trivy/pkg/iac/scanners/options" ) @@ -37,10 +37,10 @@ func Test_helm_parser_with_options_with_values_file(t *testing.T) { var opts []options.ParserOption if test.valuesFile != "" { - opts = append(opts, parser2.OptionWithValuesFile(test.valuesFile)) + opts = append(opts, parser.OptionWithValuesFile(test.valuesFile)) } - helmParser, err := parser2.New(chartName, opts...) + helmParser, err := parser.New(chartName, opts...) require.NoError(t, err) require.NoError(t, helmParser.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", chartName)), ".")) manifests, err := helmParser.RenderedChartFiles() @@ -87,14 +87,14 @@ func Test_helm_parser_with_options_with_set_value(t *testing.T) { var opts []options.ParserOption if test.valuesFile != "" { - opts = append(opts, parser2.OptionWithValuesFile(test.valuesFile)) + opts = append(opts, parser.OptionWithValuesFile(test.valuesFile)) } if test.values != "" { - opts = append(opts, parser2.OptionWithValues(test.values)) + opts = append(opts, parser.OptionWithValues(test.values)) } - helmParser, err := parser2.New(chartName, opts...) + helmParser, err := parser.New(chartName, opts...) require.NoError(t, err) err = helmParser.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", chartName)), ".") require.NoError(t, err) @@ -141,10 +141,10 @@ func Test_helm_parser_with_options_with_api_versions(t *testing.T) { var opts []options.ParserOption if len(test.apiVersions) > 0 { - opts = append(opts, parser2.OptionWithAPIVersions(test.apiVersions...)) + opts = append(opts, parser.OptionWithAPIVersions(test.apiVersions...)) } - helmParser, err := parser2.New(chartName, opts...) + helmParser, err := parser.New(chartName, opts...) 
require.NoError(t, err) err = helmParser.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", chartName)), ".") require.NoError(t, err) diff --git a/pkg/iac/scanners/helm/test/parser_test.go b/pkg/iac/scanners/helm/test/parser_test.go index 751649be5e3c..85a69469fb5d 100644 --- a/pkg/iac/scanners/helm/test/parser_test.go +++ b/pkg/iac/scanners/helm/test/parser_test.go @@ -32,12 +32,9 @@ func Test_helm_parser(t *testing.T) { for _, test := range tests { t.Run(test.testName, func(t *testing.T) { chartName := test.chartName - - t.Logf("Running test: %s", test.testName) - helmParser, err := parser.New(chartName) require.NoError(t, err) - require.NoError(t, helmParser.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", chartName)), ".")) + require.NoError(t, helmParser.ParseFS(context.TODO(), os.DirFS("testdata"), chartName)) manifests, err := helmParser.RenderedChartFiles() require.NoError(t, err) diff --git a/pkg/iac/scanners/helm/test/scanner_test.go b/pkg/iac/scanners/helm/test/scanner_test.go index 67099af2bb36..a46031a8fb98 100644 --- a/pkg/iac/scanners/helm/test/scanner_test.go +++ b/pkg/iac/scanners/helm/test/scanner_test.go @@ -318,3 +318,44 @@ deny[res] { require.NoError(t, err) assert.NotNil(t, code) } + +func TestScanSubchartOnce(t *testing.T) { + check := `# METADATA +# title: "Test rego" +# description: "Test rego" +# scope: package +# schemas: +# - input: schema["kubernetes"] +# custom: +# id: ID001 +# avd_id: AVD-USR-ID001 +# severity: LOW +# input: +# selector: +# - type: kubernetes +# subtypes: +# - kind: pod +package user.kubernetes.ID001 + +import data.lib.kubernetes + +deny[res] { + container := kubernetes.containers[_] + container.securityContext.readOnlyRootFilesystem == false + res := result.new("set 'securityContext.readOnlyRootFilesystem' to true", container) +} +` + + scanner := helm.New( + options.ScannerWithEmbeddedPolicies(false), + options.ScannerWithEmbeddedLibraries(true), + options.ScannerWithPolicyNamespaces("user"), + options.ScannerWithPolicyReader(strings.NewReader(check)), + ) + + results, err := scanner.ScanFS(context.TODO(), os.DirFS("testdata/with-subchart"), ".") + require.NoError(t, err) + require.Len(t, results, 1) + + assert.Len(t, results.GetFailed(), 0) +} diff --git a/pkg/iac/scanners/helm/test/testdata/with-subchart/Chart.yaml b/pkg/iac/scanners/helm/test/testdata/with-subchart/Chart.yaml new file mode 100644 index 000000000000..3c8c9b71ae45 --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/with-subchart/Chart.yaml @@ -0,0 +1,6 @@ +apiVersion: v2 +name: test +description: A Helm chart for Kubernetes +type: application +version: 0.1.0 +appVersion: "1.16.0" diff --git a/pkg/iac/scanners/helm/test/testdata/with-subchart/charts/nginx/Chart.yaml b/pkg/iac/scanners/helm/test/testdata/with-subchart/charts/nginx/Chart.yaml new file mode 100644 index 000000000000..45cdc636218e --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/with-subchart/charts/nginx/Chart.yaml @@ -0,0 +1,6 @@ +apiVersion: v2 +name: nginx +description: A Helm chart for Kubernetes +type: application +version: 0.1.0 +appVersion: "1.16.0" diff --git a/pkg/iac/scanners/helm/test/testdata/with-subchart/charts/nginx/templates/pod.yaml b/pkg/iac/scanners/helm/test/testdata/with-subchart/charts/nginx/templates/pod.yaml new file mode 100644 index 000000000000..70b3a84a8130 --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/with-subchart/charts/nginx/templates/pod.yaml @@ -0,0 +1,12 @@ +apiVersion: v1 +kind: Pod +metadata: + name: nginx +spec: + containers: + - 
name: nginx + image: nginx:1.14.2 + ports: + - containerPort: 8080 + securityContext: + readOnlyRootFilesystem: {{ .Values.readOnlyFs }} diff --git a/pkg/iac/scanners/helm/test/testdata/with-subchart/charts/nginx/values.yaml b/pkg/iac/scanners/helm/test/testdata/with-subchart/charts/nginx/values.yaml new file mode 100644 index 000000000000..ff3cff9db1e3 --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/with-subchart/charts/nginx/values.yaml @@ -0,0 +1 @@ +readOnlyFs: false diff --git a/pkg/iac/scanners/helm/test/testdata/with-subchart/values.yaml b/pkg/iac/scanners/helm/test/testdata/with-subchart/values.yaml new file mode 100644 index 000000000000..1e51a8fed1da --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/with-subchart/values.yaml @@ -0,0 +1,2 @@ +nginx: + readOnlyFs: true diff --git a/pkg/iac/scanners/terraform/deterministic_test.go b/pkg/iac/scanners/terraform/deterministic_test.go index d47161ec0059..258fe5bbbd16 100644 --- a/pkg/iac/scanners/terraform/deterministic_test.go +++ b/pkg/iac/scanners/terraform/deterministic_test.go @@ -7,7 +7,7 @@ import ( "github.com/aquasecurity/trivy/internal/testutil" "github.com/aquasecurity/trivy/pkg/iac/rules" "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/executor" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" "github.com/stretchr/testify/require" ) @@ -39,12 +39,12 @@ locals { }) for i := 0; i < 100; i++ { - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), ".") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, _ := executor.New().Execute(modules) + results, _ := executor.New().Execute(modules) require.Len(t, results.GetFailed(), 2) } } diff --git a/pkg/iac/scanners/terraform/executor/executor.go b/pkg/iac/scanners/terraform/executor/executor.go index 003b5b7f4db2..88dc1fa9801c 100644 --- a/pkg/iac/scanners/terraform/executor/executor.go +++ b/pkg/iac/scanners/terraform/executor/executor.go @@ -1,63 +1,37 @@ package executor import ( + "fmt" "runtime" "sort" - "strings" - "time" + + "github.com/zclconf/go-cty/cty" adapter "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform" "github.com/aquasecurity/trivy/pkg/iac/debug" "github.com/aquasecurity/trivy/pkg/iac/framework" + "github.com/aquasecurity/trivy/pkg/iac/ignore" "github.com/aquasecurity/trivy/pkg/iac/rego" "github.com/aquasecurity/trivy/pkg/iac/rules" "github.com/aquasecurity/trivy/pkg/iac/scan" - "github.com/aquasecurity/trivy/pkg/iac/severity" - "github.com/aquasecurity/trivy/pkg/iac/state" "github.com/aquasecurity/trivy/pkg/iac/terraform" + "github.com/aquasecurity/trivy/pkg/iac/types" ) // Executor scans HCL blocks by running all registered rules against them type Executor struct { - enableIgnores bool - excludedRuleIDs []string - excludeIgnoresIDs []string - includedRuleIDs []string - ignoreCheckErrors bool - workspaceName string - useSingleThread bool - debug debug.Logger - resultsFilters []func(scan.Results) scan.Results - alternativeIDProviderFunc func(string) []string - severityOverrides map[string]string - regoScanner *rego.Scanner - regoOnly bool - stateFuncs []func(*state.State) - frameworks []framework.Framework -} - -type Metrics struct { - Timings struct { - Adaptation time.Duration - RunningChecks time.Duration - } - Counts struct { - Ignored int - Failed int - Passed int - Critical 
int - High int - Medium int - Low int - } + workspaceName string + debug debug.Logger + resultsFilters []func(scan.Results) scan.Results + regoScanner *rego.Scanner + regoOnly bool + frameworks []framework.Framework } // New creates a new Executor func New(options ...Option) *Executor { s := &Executor{ - ignoreCheckErrors: true, - enableIgnores: true, - regoOnly: false, + regoOnly: false, } for _, option := range options { option(s) @@ -65,183 +39,56 @@ func New(options ...Option) *Executor { return s } -// Find element in list -func checkInList(id string, altIDs, list []string) bool { - for _, codeIgnored := range list { - if codeIgnored == id { - return true - } - for _, alt := range altIDs { - if alt == codeIgnored { - return true - } - } - } - return false -} - -func (e *Executor) Execute(modules terraform.Modules) (scan.Results, Metrics, error) { - - var metrics Metrics +func (e *Executor) Execute(modules terraform.Modules) (scan.Results, error) { e.debug.Log("Adapting modules...") - adaptationTime := time.Now() infra := adapter.Adapt(modules) - metrics.Timings.Adaptation = time.Since(adaptationTime) e.debug.Log("Adapted %d module(s) into defsec state data.", len(modules)) threads := runtime.NumCPU() if threads > 1 { threads-- } - if e.useSingleThread { - threads = 1 - } - e.debug.Log("Using max routines of %d", threads) - e.debug.Log("Applying state modifier functions...") - for _, f := range e.stateFuncs { - f(infra) - } + e.debug.Log("Using max routines of %d", threads) - checksTime := time.Now() registeredRules := rules.GetRegistered(e.frameworks...) e.debug.Log("Initialized %d rule(s).", len(registeredRules)) - pool := NewPool(threads, registeredRules, modules, infra, e.ignoreCheckErrors, e.regoScanner, e.regoOnly) + pool := NewPool(threads, registeredRules, modules, infra, e.regoScanner, e.regoOnly) e.debug.Log("Created pool with %d worker(s) to apply rules.", threads) + results, err := pool.Run() if err != nil { - return nil, metrics, err - } - metrics.Timings.RunningChecks = time.Since(checksTime) - e.debug.Log("Finished applying rules.") - - if e.enableIgnores { - e.debug.Log("Applying ignores...") - var ignores terraform.Ignores - for _, module := range modules { - ignores = append(ignores, module.Ignores()...) - } - - ignores = e.removeExcludedIgnores(ignores) - - for i, result := range results { - allIDs := []string{ - result.Rule().LongID(), - result.Rule().AVDID, - strings.ToLower(result.Rule().AVDID), - result.Rule().ShortCode, - } - allIDs = append(allIDs, result.Rule().Aliases...) - - if e.alternativeIDProviderFunc != nil { - allIDs = append(allIDs, e.alternativeIDProviderFunc(result.Rule().LongID())...) 
- } - if ignores.Covering( - modules, - result.Metadata(), - e.workspaceName, - allIDs..., - ) != nil { - e.debug.Log("Ignored '%s' at '%s'.", result.Rule().LongID(), result.Range()) - results[i].OverrideStatus(scan.StatusIgnored) - } - } - } else { - e.debug.Log("Ignores are disabled.") + return nil, err } - results = e.updateSeverity(results) - results = e.filterResults(results) - metrics.Counts.Ignored = len(results.GetIgnored()) - metrics.Counts.Passed = len(results.GetPassed()) - metrics.Counts.Failed = len(results.GetFailed()) + e.debug.Log("Finished applying rules.") - for _, res := range results.GetFailed() { - switch res.Severity() { - case severity.Critical: - metrics.Counts.Critical++ - case severity.High: - metrics.Counts.High++ - case severity.Medium: - metrics.Counts.Medium++ - case severity.Low: - metrics.Counts.Low++ - } + e.debug.Log("Applying ignores...") + var ignores ignore.Rules + for _, module := range modules { + ignores = append(ignores, module.Ignores()...) } - e.sortResults(results) - return results, metrics, nil -} - -func (e *Executor) removeExcludedIgnores(ignores terraform.Ignores) terraform.Ignores { - var filteredIgnores terraform.Ignores - for _, ignore := range ignores { - if !contains(e.excludeIgnoresIDs, ignore.RuleID) { - filteredIgnores = append(filteredIgnores, ignore) - } + ignorers := map[string]ignore.Ignorer{ + "ws": workspaceIgnorer(e.workspaceName), + "ignore": attributeIgnorer(modules), } - return filteredIgnores -} -func contains(arr []string, s string) bool { - for _, elem := range arr { - if elem == s { - return true - } - } - return false -} + results.Ignore(ignores, ignorers) -func (e *Executor) updateSeverity(results []scan.Result) scan.Results { - if len(e.severityOverrides) == 0 { - return results + for _, ignored := range results.GetIgnored() { + e.debug.Log("Ignored '%s' at '%s'.", ignored.Rule().LongID(), ignored.Range()) } - var overriddenResults scan.Results - for _, res := range results { - for code, sev := range e.severityOverrides { - - var altMatch bool - if e.alternativeIDProviderFunc != nil { - alts := e.alternativeIDProviderFunc(res.Rule().LongID()) - for _, alt := range alts { - if alt == code { - altMatch = true - break - } - } - } - - if altMatch || res.Rule().LongID() == code { - overrides := scan.Results([]scan.Result{res}) - override := res.Rule() - override.Severity = severity.Severity(sev) - overrides.SetRule(override) - res = overrides[0] - } - } - overriddenResults = append(overriddenResults, res) - } + results = e.filterResults(results) - return overriddenResults + e.sortResults(results) + return results, nil } func (e *Executor) filterResults(results scan.Results) scan.Results { - includedOnly := len(e.includedRuleIDs) > 0 - for i, result := range results { - id := result.Rule().LongID() - var altIDs []string - if e.alternativeIDProviderFunc != nil { - altIDs = e.alternativeIDProviderFunc(id) - } - if (includedOnly && !checkInList(id, altIDs, e.includedRuleIDs)) || checkInList(id, altIDs, e.excludedRuleIDs) { - e.debug.Log("Excluding '%s' at '%s'.", result.Rule().LongID(), result.Range()) - results[i].OverrideStatus(scan.StatusIgnored) - } - } - if len(e.resultsFilters) > 0 && len(results) > 0 { before := len(results.GetIgnored()) e.debug.Log("Applying %d results filters to %d results...", len(results), before) @@ -266,3 +113,60 @@ func (e *Executor) sortResults(results []scan.Result) { } }) } + +func ignoreByParams(params map[string]string, modules terraform.Modules, m *types.Metadata) bool { + if len(params) == 
0 { + return true + } + block := modules.GetBlockByIgnoreRange(m) + if block == nil { + return true + } + for key, val := range params { + attr, _ := block.GetNestedAttribute(key) + if attr.IsNil() || !attr.Value().IsKnown() { + return false + } + switch attr.Type() { + case cty.String: + if !attr.Equals(val) { + return false + } + case cty.Number: + bf := attr.Value().AsBigFloat() + f64, _ := bf.Float64() + comparableInt := fmt.Sprintf("%d", int(f64)) + comparableFloat := fmt.Sprintf("%f", f64) + if val != comparableInt && val != comparableFloat { + return false + } + case cty.Bool: + if fmt.Sprintf("%t", attr.IsTrue()) != val { + return false + } + default: + return false + } + } + return true +} + +func workspaceIgnorer(ws string) ignore.Ignorer { + return func(_ types.Metadata, param any) bool { + ignoredWorkspace, ok := param.(string) + if !ok { + return false + } + return ignore.MatchPattern(ws, ignoredWorkspace) + } +} + +func attributeIgnorer(modules terraform.Modules) ignore.Ignorer { + return func(resultMeta types.Metadata, param any) bool { + params, ok := param.(map[string]string) + if !ok { + return false + } + return ignoreByParams(params, modules, &resultMeta) + } +} diff --git a/pkg/iac/scanners/terraform/executor/executor_test.go b/pkg/iac/scanners/terraform/executor/executor_test.go index 952803a507f5..ac663c313c17 100644 --- a/pkg/iac/scanners/terraform/executor/executor_test.go +++ b/pkg/iac/scanners/terraform/executor/executor_test.go @@ -8,7 +8,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/providers" "github.com/aquasecurity/trivy/pkg/iac/rules" "github.com/aquasecurity/trivy/pkg/iac/scan" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" "github.com/aquasecurity/trivy/pkg/iac/severity" "github.com/aquasecurity/trivy/pkg/iac/terraform" "github.com/stretchr/testify/assert" @@ -47,12 +47,15 @@ resource "problem" "this" { `, }) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, _ := New().Execute(modules) + + results, err := New().Execute(modules) + assert.Error(t, err) + assert.Equal(t, len(results.GetFailed()), 0) } @@ -69,12 +72,13 @@ resource "problem" "this" { `, }) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) + modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - _, _, err = New(OptionStopOnErrors(false)).Execute(modules) + _, err = New().Execute(modules) assert.Error(t, err) } @@ -91,12 +95,15 @@ resource "problem" "this" { `, }) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, _ := New().Execute(modules) + + results, _ := New().Execute(modules) + require.NoError(t, err) + assert.Equal(t, len(results.GetFailed()), 0) } @@ -113,12 +120,12 @@ resource "problem" "this" { `, }) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") 
require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - _, _, err = New(OptionStopOnErrors(false)).Execute(modules) + _, err = New().Execute(modules) assert.Error(t, err) } diff --git a/pkg/iac/scanners/terraform/executor/option.go b/pkg/iac/scanners/terraform/executor/option.go index d32abb7afdcb..a58d72867b54 100644 --- a/pkg/iac/scanners/terraform/executor/option.go +++ b/pkg/iac/scanners/terraform/executor/option.go @@ -7,7 +7,6 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/framework" "github.com/aquasecurity/trivy/pkg/iac/rego" "github.com/aquasecurity/trivy/pkg/iac/scan" - "github.com/aquasecurity/trivy/pkg/iac/state" ) type Option func(s *Executor) @@ -18,84 +17,30 @@ func OptionWithFrameworks(frameworks ...framework.Framework) Option { } } -func OptionWithAlternativeIDProvider(f func(string) []string) Option { - return func(s *Executor) { - s.alternativeIDProviderFunc = f - } -} - func OptionWithResultsFilter(f func(scan.Results) scan.Results) Option { return func(s *Executor) { s.resultsFilters = append(s.resultsFilters, f) } } -func OptionWithSeverityOverrides(overrides map[string]string) Option { - return func(s *Executor) { - s.severityOverrides = overrides - } -} - func OptionWithDebugWriter(w io.Writer) Option { return func(s *Executor) { s.debug = debug.New(w, "terraform", "executor") } } -func OptionNoIgnores() Option { - return func(s *Executor) { - s.enableIgnores = false - } -} - -func OptionExcludeRules(ruleIDs []string) Option { - return func(s *Executor) { - s.excludedRuleIDs = ruleIDs - } -} - -func OptionExcludeIgnores(ruleIDs []string) Option { - return func(s *Executor) { - s.excludeIgnoresIDs = ruleIDs - } -} - -func OptionIncludeRules(ruleIDs []string) Option { - return func(s *Executor) { - s.includedRuleIDs = ruleIDs - } -} - -func OptionStopOnErrors(stop bool) Option { - return func(s *Executor) { - s.ignoreCheckErrors = !stop - } -} - func OptionWithWorkspaceName(workspaceName string) Option { return func(s *Executor) { s.workspaceName = workspaceName } } -func OptionWithSingleThread(single bool) Option { - return func(s *Executor) { - s.useSingleThread = single - } -} - func OptionWithRegoScanner(s *rego.Scanner) Option { return func(e *Executor) { e.regoScanner = s } } -func OptionWithStateFunc(f ...func(*state.State)) Option { - return func(e *Executor) { - e.stateFuncs = f - } -} - func OptionWithRegoOnly(regoOnly bool) Option { return func(e *Executor) { e.regoOnly = regoOnly diff --git a/pkg/iac/scanners/terraform/executor/pool.go b/pkg/iac/scanners/terraform/executor/pool.go index a62fbe510de0..69b8405ee3a7 100644 --- a/pkg/iac/scanners/terraform/executor/pool.go +++ b/pkg/iac/scanners/terraform/executor/pool.go @@ -17,24 +17,22 @@ import ( ) type Pool struct { - size int - modules terraform.Modules - state *state.State - rules []types.RegisteredRule - ignoreErrors bool - rs *rego.Scanner - regoOnly bool + size int + modules terraform.Modules + state *state.State + rules []types.RegisteredRule + rs *rego.Scanner + regoOnly bool } -func NewPool(size int, rules []types.RegisteredRule, modules terraform.Modules, st *state.State, ignoreErrors bool, regoScanner *rego.Scanner, regoOnly bool) *Pool { +func NewPool(size int, rules []types.RegisteredRule, modules terraform.Modules, st *state.State, regoScanner *rego.Scanner, regoOnly bool) *Pool { return &Pool{ - size: size, - rules: rules, - state: st, - modules: modules, - ignoreErrors: ignoreErrors, - rs: regoScanner, - regoOnly: regoOnly, + size: size, + 
rules: rules, + state: st, + modules: modules, + rs: regoScanner, + regoOnly: regoOnly, } } @@ -69,17 +67,15 @@ func (p *Pool) Run() (scan.Results, error) { for _, module := range p.modules { mod := *module outgoing <- &hclModuleRuleJob{ - module: &mod, - rule: r, - ignoreErrors: p.ignoreErrors, + module: &mod, + rule: r, } } } else { // run defsec rule outgoing <- &infraRuleJob{ - state: p.state, - rule: r, - ignoreErrors: p.ignoreErrors, + state: p.state, + rule: r, } } } @@ -105,14 +101,11 @@ type Job interface { type infraRuleJob struct { state *state.State rule types.RegisteredRule - - ignoreErrors bool } type hclModuleRuleJob struct { - module *terraform.Module - rule types.RegisteredRule - ignoreErrors bool + module *terraform.Module + rule types.RegisteredRule } type regoJob struct { @@ -122,24 +115,21 @@ type regoJob struct { } func (h *infraRuleJob) Run() (_ scan.Results, err error) { - if h.ignoreErrors { - defer func() { - if panicErr := recover(); panicErr != nil { - err = fmt.Errorf("%s\n%s", panicErr, string(runtimeDebug.Stack())) - } - }() - } + defer func() { + if panicErr := recover(); panicErr != nil { + err = fmt.Errorf("%s\n%s", panicErr, string(runtimeDebug.Stack())) + } + }() + return h.rule.Evaluate(h.state), err } func (h *hclModuleRuleJob) Run() (results scan.Results, err error) { - if h.ignoreErrors { - defer func() { - if panicErr := recover(); panicErr != nil { - err = fmt.Errorf("%s\n%s", panicErr, string(runtimeDebug.Stack())) - } - }() - } + defer func() { + if panicErr := recover(); panicErr != nil { + err = fmt.Errorf("%s\n%s", panicErr, string(runtimeDebug.Stack())) + } + }() customCheck := h.rule.GetRule().CustomChecks.Terraform for _, block := range h.module.GetBlocks() { if !isCustomCheckRequiredForBlock(customCheck, block) { diff --git a/pkg/iac/scanners/terraform/executor/statistics.go b/pkg/iac/scanners/terraform/executor/statistics.go deleted file mode 100644 index fc42985747a5..000000000000 --- a/pkg/iac/scanners/terraform/executor/statistics.go +++ /dev/null @@ -1,92 +0,0 @@ -package executor - -import ( - "encoding/json" - "fmt" - "io" - "sort" - "strconv" - "strings" - - "github.com/olekukonko/tablewriter" - - "github.com/aquasecurity/trivy/pkg/iac/scan" -) - -type StatisticsItem struct { - RuleID string `json:"rule_id"` - RuleDescription string `json:"rule_description"` - Links []string `json:"links"` - Count int `json:"count"` -} - -type Statistics []StatisticsItem - -type StatisticsResult struct { - Result Statistics `json:"results"` -} - -func SortStatistics(statistics Statistics) Statistics { - sort.Slice(statistics, func(i, j int) bool { - return statistics[i].Count > statistics[j].Count - }) - return statistics -} - -func (statistics Statistics) PrintStatisticsTable(format string, w io.Writer) error { - // lovely is the default so we keep it like that - if format != "lovely" && format != "markdown" && format != "json" { - return fmt.Errorf("you must specify only lovely, markdown or json format with --run-statistics") - } - - sorted := SortStatistics(statistics) - - if format == "json" { - result := StatisticsResult{Result: sorted} - val, err := json.MarshalIndent(result, "", " ") - if err != nil { - return err - } - - _, _ = fmt.Fprintln(w, string(val)) - - return nil - } - - table := tablewriter.NewWriter(w) - table.SetHeader([]string{"Rule ID", "Description", "Link", "Count"}) - table.SetRowLine(true) - - if format == "markdown" { - table.SetBorders(tablewriter.Border{Left: true, Top: false, Right: true, Bottom: false}) - 
table.SetCenterSeparator("|") - } - - for _, item := range sorted { - table.Append([]string{item.RuleID, - item.RuleDescription, - strings.Join(item.Links, "\n"), - strconv.Itoa(item.Count)}) - } - - table.Render() - - return nil -} - -func AddStatisticsCount(statistics Statistics, result scan.Result) Statistics { - for i, statistic := range statistics { - if statistic.RuleID == result.Rule().LongID() { - statistics[i].Count += 1 - return statistics - } - } - statistics = append(statistics, StatisticsItem{ - RuleID: result.Rule().LongID(), - RuleDescription: result.Rule().Summary, - Links: result.Rule().Links, - Count: 1, - }) - - return statistics -} diff --git a/pkg/iac/scanners/terraform/ignore_test.go b/pkg/iac/scanners/terraform/ignore_test.go index 6e561d256653..ddddd7a6e04e 100644 --- a/pkg/iac/scanners/terraform/ignore_test.go +++ b/pkg/iac/scanners/terraform/ignore_test.go @@ -24,12 +24,18 @@ var exampleRule = scan.Rule{ Terraform: &scan.TerraformCustomCheck{ RequiredLabels: []string{"bad"}, Check: func(resourceBlock *terraform.Block, _ *terraform.Module) (results scan.Results) { - attr := resourceBlock.GetAttribute("secure") - if attr.IsNil() { - results.Add("example problem", resourceBlock) - } - if attr.IsFalse() { - results.Add("example problem", attr) + if attr, _ := resourceBlock.GetNestedAttribute("secure_settings.enabled"); attr.IsNotNil() { + if attr.IsFalse() { + results.Add("example problem", attr) + } + } else { + attr := resourceBlock.GetAttribute("secure") + if attr.IsNil() { + results.Add("example problem", resourceBlock) + } + if attr.IsFalse() { + results.Add("example problem", attr) + } } return }, @@ -44,58 +50,92 @@ func Test_IgnoreAll(t *testing.T) { inputOptions string assertLength int }{ - {name: "IgnoreAll", inputOptions: ` + { + name: "IgnoreAll", + inputOptions: ` resource "bad" "my-rule" { secure = false // tfsec:ignore:* } -`, assertLength: 0}, - {name: "IgnoreLineAboveTheBlock", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "IgnoreLineAboveTheBlock", + inputOptions: ` // tfsec:ignore:* resource "bad" "my-rule" { secure = false } -`, assertLength: 0}, - {name: "IgnoreLineAboveTheBlockMatchingParamBool", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "IgnoreLineAboveTheBlockMatchingParamBool", + inputOptions: ` // tfsec:ignore:*[secure=false] resource "bad" "my-rule" { secure = false } -`, assertLength: 0}, - {name: "IgnoreLineAboveTheBlockNotMatchingParamBool", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "IgnoreLineAboveTheBlockNotMatchingParamBool", + inputOptions: ` // tfsec:ignore:*[secure=true] resource "bad" "my-rule" { secure = false } -`, assertLength: 1}, - {name: "IgnoreLineAboveTheBlockMatchingParamString", inputOptions: ` +`, + assertLength: 1, + }, + { + name: "IgnoreLineAboveTheBlockMatchingParamString", + inputOptions: ` // tfsec:ignore:*[name=myrule] resource "bad" "my-rule" { name = "myrule" secure = false } -`, assertLength: 0}, - {name: "IgnoreLineAboveTheBlockNotMatchingParamString", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "IgnoreLineAboveTheBlockNotMatchingParamString", + inputOptions: ` // tfsec:ignore:*[name=myrule2] resource "bad" "my-rule" { name = "myrule" secure = false } -`, assertLength: 1}, - {name: "IgnoreLineAboveTheBlockMatchingParamInt", inputOptions: ` +`, + assertLength: 1, + }, + { + name: "IgnoreLineAboveTheBlockMatchingParamInt", + inputOptions: ` // tfsec:ignore:*[port=123] resource "bad" "my-rule" { secure = false port = 123 } -`, assertLength: 0}, - {name: 
"IgnoreLineAboveTheBlockNotMatchingParamInt", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "IgnoreLineAboveTheBlockNotMatchingParamInt", + inputOptions: ` // tfsec:ignore:*[port=456] resource "bad" "my-rule" { secure = false port = 123 } -`, assertLength: 1}, - {name: "IgnoreLineStackedAboveTheBlock", inputOptions: ` +`, + assertLength: 1, + }, + { + name: "IgnoreLineStackedAboveTheBlock", + inputOptions: ` // tfsec:ignore:* // tfsec:ignore:a // tfsec:ignore:b @@ -104,8 +144,12 @@ resource "bad" "my-rule" { resource "bad" "my-rule" { secure = false } -`, assertLength: 0}, - {name: "IgnoreLineStackedAboveTheBlockWithoutMatch", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "IgnoreLineStackedAboveTheBlockWithoutMatch", + inputOptions: ` #tfsec:ignore:* #tfsec:ignore:x @@ -116,8 +160,12 @@ resource "bad" "my-rule" { resource "bad" "my-rule" { secure = false } -`, assertLength: 1}, - {name: "IgnoreLineStackedAboveTheBlockWithHashesWithoutSpaces", inputOptions: ` +`, + assertLength: 1, + }, + { + name: "IgnoreLineStackedAboveTheBlockWithHashesWithoutSpaces", + inputOptions: ` #tfsec:ignore:* #tfsec:ignore:a #tfsec:ignore:b @@ -126,8 +174,12 @@ resource "bad" "my-rule" { resource "bad" "my-rule" { secure = false } -`, assertLength: 0}, - {name: "IgnoreLineStackedAboveTheBlockWithoutSpaces", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "IgnoreLineStackedAboveTheBlockWithoutSpaces", + inputOptions: ` //tfsec:ignore:* //tfsec:ignore:a //tfsec:ignore:b @@ -136,135 +188,261 @@ resource "bad" "my-rule" { resource "bad" "my-rule" { secure = false } -`, assertLength: 0}, - {name: "IgnoreLineAboveTheLine", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "IgnoreLineAboveTheLine", + inputOptions: ` resource "bad" "my-rule" { # tfsec:ignore:aws-service-abc123 secure = false } -`, assertLength: 0}, - {name: "IgnoreWithExpDateIfDateBreachedThenDontIgnore", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "IgnoreWithExpDateIfDateBreachedThenDontIgnore", + inputOptions: ` resource "bad" "my-rule" { secure = false # tfsec:ignore:aws-service-abc123:exp:2000-01-02 } -`, assertLength: 1}, - {name: "IgnoreWithExpDateIfDateNotBreachedThenIgnoreIgnore", inputOptions: ` +`, + assertLength: 1, + }, + { + name: "IgnoreWithExpDateIfDateNotBreachedThenIgnoreIgnore", + inputOptions: ` resource "bad" "my-rule" { secure = false # tfsec:ignore:aws-service-abc123:exp:2221-01-02 } -`, assertLength: 0}, - {name: "IgnoreWithExpDateIfDateInvalidThenDropTheIgnore", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "IgnoreWithExpDateIfDateInvalidThenDropTheIgnore", + inputOptions: ` resource "bad" "my-rule" { secure = false # tfsec:ignore:aws-service-abc123:exp:2221-13-02 } -`, assertLength: 1}, - {name: "IgnoreAboveResourceBlockWithExpDateIfDateNotBreachedThenIgnoreIgnore", inputOptions: ` +`, + assertLength: 1, + }, + { + name: "IgnoreAboveResourceBlockWithExpDateIfDateNotBreachedThenIgnoreIgnore", + inputOptions: ` #tfsec:ignore:aws-service-abc123:exp:2221-01-02 resource "bad" "my-rule" { } -`, assertLength: 0}, - {name: "IgnoreAboveResourceBlockWithExpDateAndMultipleIgnoresIfDateNotBreachedThenIgnoreIgnore", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "IgnoreAboveResourceBlockWithExpDateAndMultipleIgnoresIfDateNotBreachedThenIgnoreIgnore", + inputOptions: ` # tfsec:ignore:aws-service-abc123:exp:2221-01-02 resource "bad" "my-rule" { } -`, assertLength: 0}, - {name: "IgnoreForImpliedIAMResource", inputOptions: ` +`, + assertLength: 0, + }, + { + name: 
"IgnoreForImpliedIAMResource", + inputOptions: ` terraform { -required_version = "~> 1.1.6" + required_version = "~> 1.1.6" -required_providers { -aws = { -source = "hashicorp/aws" -version = "~> 3.48" -} -} + required_providers { + aws = { + source = "hashicorp/aws" + version = "~> 3.48" + } + } } # Retrieve an IAM group defined outside of this Terraform config. # tfsec:ignore:aws-iam-enforce-mfa data "aws_iam_group" "externally_defined_group" { -group_name = "group-name" # tfsec:ignore:aws-iam-enforce-mfa + group_name = "group-name" # tfsec:ignore:aws-iam-enforce-mfa } # Create an IAM policy and attach it to the group. # tfsec:ignore:aws-iam-enforce-mfa resource "aws_iam_policy" "test_policy" { -name = "test-policy" # tfsec:ignore:aws-iam-enforce-mfa -policy = data.aws_iam_policy_document.test_policy.json # tfsec:ignore:aws-iam-enforce-mfa + name = "test-policy" # tfsec:ignore:aws-iam-enforce-mfa + policy = data.aws_iam_policy_document.test_policy.json # tfsec:ignore:aws-iam-enforce-mfa } # tfsec:ignore:aws-iam-enforce-mfa resource "aws_iam_group_policy_attachment" "test_policy_attachment" { -group = data.aws_iam_group.externally_defined_group.group_name # tfsec:ignore:aws-iam-enforce-mfa -policy_arn = aws_iam_policy.test_policy.arn # tfsec:ignore:aws-iam-enforce-mfa + group = data.aws_iam_group.externally_defined_group.group_name # tfsec:ignore:aws-iam-enforce-mfa + policy_arn = aws_iam_policy.test_policy.arn # tfsec:ignore:aws-iam-enforce-mfa } # tfsec:ignore:aws-iam-enforce-mfa data "aws_iam_policy_document" "test_policy" { -statement { -sid = "PublishToCloudWatch" # tfsec:ignore:aws-iam-enforce-mfa -actions = [ -"cloudwatch:PutMetricData", # tfsec:ignore:aws-iam-enforce-mfa -] -resources = ["*"] # tfsec:ignore:aws-iam-enforce-mfa -} -} -`, assertLength: 0}, - {name: "TrivyIgnoreAll", inputOptions: ` + statement { + sid = "PublishToCloudWatch" # tfsec:ignore:aws-iam-enforce-mfa + actions = [ + "cloudwatch:PutMetricData", # tfsec:ignore:aws-iam-enforce-mfa + ] + resources = ["*"] # tfsec:ignore:aws-iam-enforce-mfa + } +} +`, + assertLength: 0, + }, + { + name: "TrivyIgnoreAll", + inputOptions: ` resource "bad" "my-rule" { secure = false // trivy:ignore:* } -`, assertLength: 0}, - {name: "TrivyIgnoreLineAboveTheBlock", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "TrivyIgnoreLineAboveTheBlock", + inputOptions: ` // trivy:ignore:* resource "bad" "my-rule" { secure = false } -`, assertLength: 0}, - {name: "TrivyIgnoreLineAboveTheBlockMatchingParamBool", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "TrivyIgnoreLineAboveTheBlockMatchingParamBool", + inputOptions: ` // trivy:ignore:*[secure=false] resource "bad" "my-rule" { secure = false } -`, assertLength: 0}, - {name: "TrivyIgnoreLineAboveTheBlockNotMatchingParamBool", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "TrivyIgnoreLineAboveTheBlockNotMatchingParamBool", + inputOptions: ` // trivy:ignore:*[secure=true] resource "bad" "my-rule" { secure = false } -`, assertLength: 1}, - {name: "TrivyIgnoreLineAboveTheBlockMatchingParamString", inputOptions: ` +`, + assertLength: 1, + }, + { + name: "TrivyIgnoreLineAboveTheBlockMatchingParamString", + inputOptions: ` // trivy:ignore:*[name=myrule] resource "bad" "my-rule" { name = "myrule" secure = false } -`, assertLength: 0}, - {name: "TrivyIgnoreLineAboveTheBlockNotMatchingParamString", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "TrivyIgnoreLineAboveTheBlockNotMatchingParamString", + inputOptions: ` // trivy:ignore:*[name=myrule2] resource "bad" 
"my-rule" { name = "myrule" secure = false } -`, assertLength: 1}, - {name: "TrivyIgnoreLineAboveTheBlockMatchingParamInt", inputOptions: ` +`, + assertLength: 1, + }, + { + name: "TrivyIgnoreLineAboveTheBlockMatchingParamInt", + inputOptions: ` // trivy:ignore:*[port=123] resource "bad" "my-rule" { secure = false port = 123 } -`, assertLength: 0}, - {name: "TrivyIgnoreLineAboveTheBlockNotMatchingParamInt", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "TrivyIgnoreLineAboveTheBlockNotMatchingParamInt", + inputOptions: ` // trivy:ignore:*[port=456] resource "bad" "my-rule" { secure = false port = 123 } -`, assertLength: 1}, - {name: "TrivyIgnoreLineStackedAboveTheBlock", inputOptions: ` +`, + assertLength: 1, + }, + { + name: "ignore by nested attribute", + inputOptions: ` +// trivy:ignore:*[secure_settings.enabled=false] +resource "bad" "my-rule" { + secure_settings { + enabled = false + } +} +`, + assertLength: 0, + }, + { + name: "ignore by nested attribute of another type", + inputOptions: ` +// trivy:ignore:*[secure_settings.enabled=1] +resource "bad" "my-rule" { + secure_settings { + enabled = false + } +} +`, + assertLength: 1, + }, + { + name: "ignore by non-existent nested attribute", + inputOptions: ` +// trivy:ignore:*[secure_settings.rule=myrule] +resource "bad" "my-rule" { + secure_settings { + enabled = false + } +} +`, + assertLength: 1, + }, + { + name: "ignore resource with `for_each` meta-argument", + inputOptions: ` +// trivy:ignore:*[secure=false] +resource "bad" "my-rule" { + for_each = toset(["false", "true", "false"]) + secure = each.key +} +`, + assertLength: 0, + }, + { + name: "ignore by dynamic block value", + inputOptions: ` +// trivy:ignore:*[secure_settings.enabled=false] +resource "bad" "my-rule" { + dynamic "secure_settings" { + for_each = ["false", "true"] + content { + enabled = secure_settings.value + } + } +} +`, + assertLength: 0, + }, + { + name: "TrivyIgnoreLineStackedAboveTheBlock", + inputOptions: ` // trivy:ignore:* // trivy:ignore:a // trivy:ignore:b @@ -273,8 +451,12 @@ resource "bad" "my-rule" { resource "bad" "my-rule" { secure = false } -`, assertLength: 0}, - {name: "TrivyIgnoreLineStackedAboveTheBlockWithoutMatch", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "TrivyIgnoreLineStackedAboveTheBlockWithoutMatch", + inputOptions: ` #trivy:ignore:* #trivy:ignore:x @@ -285,8 +467,12 @@ resource "bad" "my-rule" { resource "bad" "my-rule" { secure = false } -`, assertLength: 1}, - {name: "TrivyIgnoreLineStackedAboveTheBlockWithHashesWithoutSpaces", inputOptions: ` +`, + assertLength: 1, + }, + { + name: "TrivyIgnoreLineStackedAboveTheBlockWithHashesWithoutSpaces", + inputOptions: ` #trivy:ignore:* #trivy:ignore:a #trivy:ignore:b @@ -295,8 +481,12 @@ resource "bad" "my-rule" { resource "bad" "my-rule" { secure = false } -`, assertLength: 0}, - {name: "TrivyIgnoreLineStackedAboveTheBlockWithoutSpaces", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "TrivyIgnoreLineStackedAboveTheBlockWithoutSpaces", + inputOptions: ` //trivy:ignore:* //trivy:ignore:a //trivy:ignore:b @@ -305,83 +495,114 @@ resource "bad" "my-rule" { resource "bad" "my-rule" { secure = false } -`, assertLength: 0}, - {name: "TrivyIgnoreLineAboveTheLine", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "TrivyIgnoreLineAboveTheLine", + inputOptions: ` resource "bad" "my-rule" { # trivy:ignore:aws-service-abc123 secure = false } -`, assertLength: 0}, - {name: "TrivyIgnoreWithExpDateIfDateBreachedThenDontIgnore", inputOptions: ` +`, + assertLength: 0, + 
}, + { + name: "TrivyIgnoreWithExpDateIfDateBreachedThenDontIgnore", + inputOptions: ` resource "bad" "my-rule" { secure = false # trivy:ignore:aws-service-abc123:exp:2000-01-02 } -`, assertLength: 1}, - {name: "TrivyIgnoreWithExpDateIfDateNotBreachedThenIgnoreIgnore", inputOptions: ` +`, + assertLength: 1, + }, + { + name: "TrivyIgnoreWithExpDateIfDateNotBreachedThenIgnoreIgnore", + inputOptions: ` resource "bad" "my-rule" { secure = false # trivy:ignore:aws-service-abc123:exp:2221-01-02 } -`, assertLength: 0}, - {name: "TrivyIgnoreWithExpDateIfDateInvalidThenDropTheIgnore", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "TrivyIgnoreWithExpDateIfDateInvalidThenDropTheIgnore", + inputOptions: ` resource "bad" "my-rule" { secure = false # trivy:ignore:aws-service-abc123:exp:2221-13-02 } -`, assertLength: 1}, - {name: "TrivyIgnoreAboveResourceBlockWithExpDateIfDateNotBreachedThenIgnoreIgnore", inputOptions: ` +`, + assertLength: 1, + }, + { + name: "TrivyIgnoreAboveResourceBlockWithExpDateIfDateNotBreachedThenIgnoreIgnore", + inputOptions: ` #trivy:ignore:aws-service-abc123:exp:2221-01-02 resource "bad" "my-rule" { } -`, assertLength: 0}, - {name: "TrivyIgnoreAboveResourceBlockWithExpDateAndMultipleIgnoresIfDateNotBreachedThenIgnoreIgnore", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "TrivyIgnoreAboveResourceBlockWithExpDateAndMultipleIgnoresIfDateNotBreachedThenIgnoreIgnore", + inputOptions: ` # trivy:ignore:aws-service-abc123:exp:2221-01-02 resource "bad" "my-rule" { } -`, assertLength: 0}, - {name: "TrivyIgnoreForImpliedIAMResource", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "TrivyIgnoreForImpliedIAMResource", + inputOptions: ` terraform { -required_version = "~> 1.1.6" + required_version = "~> 1.1.6" -required_providers { -aws = { -source = "hashicorp/aws" -version = "~> 3.48" -} -} + required_providers { + aws = { + source = "hashicorp/aws" + version = "~> 3.48" + } + } } # Retrieve an IAM group defined outside of this Terraform config. # trivy:ignore:aws-iam-enforce-mfa data "aws_iam_group" "externally_defined_group" { -group_name = "group-name" # trivy:ignore:aws-iam-enforce-mfa + group_name = "group-name" # trivy:ignore:aws-iam-enforce-mfa } # Create an IAM policy and attach it to the group. 
# trivy:ignore:aws-iam-enforce-mfa resource "aws_iam_policy" "test_policy" { -name = "test-policy" # trivy:ignore:aws-iam-enforce-mfa -policy = data.aws_iam_policy_document.test_policy.json # trivy:ignore:aws-iam-enforce-mfa + name = "test-policy" # trivy:ignore:aws-iam-enforce-mfa + policy = data.aws_iam_policy_document.test_policy.json # trivy:ignore:aws-iam-enforce-mfa } # trivy:ignore:aws-iam-enforce-mfa resource "aws_iam_group_policy_attachment" "test_policy_attachment" { -group = data.aws_iam_group.externally_defined_group.group_name # trivy:ignore:aws-iam-enforce-mfa -policy_arn = aws_iam_policy.test_policy.arn # trivy:ignore:aws-iam-enforce-mfa + group = data.aws_iam_group.externally_defined_group.group_name # trivy:ignore:aws-iam-enforce-mfa + policy_arn = aws_iam_policy.test_policy.arn # trivy:ignore:aws-iam-enforce-mfa } # trivy:ignore:aws-iam-enforce-mfa data "aws_iam_policy_document" "test_policy" { -statement { -sid = "PublishToCloudWatch" # trivy:ignore:aws-iam-enforce-mfa -actions = [ -"cloudwatch:PutMetricData", # trivy:ignore:aws-iam-enforce-mfa -] -resources = ["*"] # trivy:ignore:aws-iam-enforce-mfa -} -} -`, assertLength: 0}} + statement { + sid = "PublishToCloudWatch" # trivy:ignore:aws-iam-enforce-mfa + actions = [ + "cloudwatch:PutMetricData", # trivy:ignore:aws-iam-enforce-mfa + ] + resources = ["*"] # trivy:ignore:aws-iam-enforce-mfa + } +} +`, + assertLength: 0, + }, + } reg := rules.Register(exampleRule) defer rules.Deregister(reg) @@ -394,16 +615,53 @@ resources = ["*"] # trivy:ignore:aws-iam-enforce-mfa } } -func Test_IgnoreIgnoreWithExpiryAndWorkspaceAndWorkspaceSupplied(t *testing.T) { +func Test_IgnoreByWorkspace(t *testing.T) { reg := rules.Register(exampleRule) defer rules.Deregister(reg) - results := scanHCLWithWorkspace(t, ` -# tfsec:ignore:aws-service-abc123:exp:2221-01-02:ws:testworkspace -resource "bad" "my-rule" { -} -`, "testworkspace") - assert.Len(t, results.GetFailed(), 0) + tests := []struct { + name string + src string + expectedFailed int + }{ + { + name: "with expiry and workspace", + src: `# tfsec:ignore:aws-service-abc123:exp:2221-01-02:ws:testworkspace +resource "bad" "my-rule" {}`, + expectedFailed: 0, + }, + { + name: "bad workspace", + src: `# tfsec:ignore:aws-service-abc123:exp:2221-01-02:ws:otherworkspace +resource "bad" "my-rule" {}`, + expectedFailed: 1, + }, + { + name: "with expiry and workspace, trivy prefix", + src: `# trivy:ignore:aws-service-abc123:exp:2221-01-02:ws:testworkspace +resource "bad" "my-rule" {}`, + expectedFailed: 0, + }, + { + name: "bad workspace, trivy prefix", + src: `# trivy:ignore:aws-service-abc123:exp:2221-01-02:ws:otherworkspace +resource "bad" "my-rule" {}`, + expectedFailed: 1, + }, + { + name: "workspace with wildcard", + src: `# tfsec:ignore:*:ws:test* +resource "bad" "my-rule" {}`, + expectedFailed: 0, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + results := scanHCLWithWorkspace(t, tt.src, "testworkspace") + assert.Len(t, results.GetFailed(), tt.expectedFailed) + }) + } } func Test_IgnoreInline(t *testing.T) { @@ -418,19 +676,6 @@ func Test_IgnoreInline(t *testing.T) { assert.Len(t, results.GetFailed(), 0) } -func Test_IgnoreIgnoreWithExpiryAndWorkspaceButWrongWorkspaceSupplied(t *testing.T) { - reg := rules.Register(exampleRule) - defer rules.Deregister(reg) - - results := scanHCLWithWorkspace(t, ` -# tfsec:ignore:aws-service-abc123:exp:2221-01-02:ws:otherworkspace -resource "bad" "my-rule" { - -} -`, "testworkspace") - assert.Len(t, results.GetFailed(), 1) -} - 
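+// A rough summary of the ignore comment grammar exercised by the cases above
+// (the tfsec and trivy prefixes behave identically in these fixtures):
+//
+//	# trivy:ignore:<rule-id>                  a specific ID, or * for any rule
+//	# trivy:ignore:<rule-id>[key=value]       match block attributes, including
+//	                                          nested ones such as secure_settings.enabled
+//	# trivy:ignore:<rule-id>:exp:YYYY-MM-DD   breached or invalid dates drop the ignore
+//	# trivy:ignore:<rule-id>:ws:<workspace>   the workspace may be a wildcard, e.g. test*
+//
+// This only restates what the fixtures in this file cover; it is not a full
+// specification of the ignore syntax.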
func Test_IgnoreWithAliasCodeStillIgnored(t *testing.T) { reg := rules.Register(exampleRule) defer rules.Deregister(reg) @@ -444,31 +689,6 @@ resource "bad" "my-rule" { assert.Len(t, results.GetFailed(), 0) } -func Test_TrivyIgnoreIgnoreWithExpiryAndWorkspaceAndWorkspaceSupplied(t *testing.T) { - reg := rules.Register(exampleRule) - defer rules.Deregister(reg) - - results := scanHCLWithWorkspace(t, ` -# trivy:ignore:aws-service-abc123:exp:2221-01-02:ws:testworkspace -resource "bad" "my-rule" { -} -`, "testworkspace") - assert.Len(t, results.GetFailed(), 0) -} - -func Test_TrivyIgnoreIgnoreWithExpiryAndWorkspaceButWrongWorkspaceSupplied(t *testing.T) { - reg := rules.Register(exampleRule) - defer rules.Deregister(reg) - - results := scanHCLWithWorkspace(t, ` -# trivy:ignore:aws-service-abc123:exp:2221-01-02:ws:otherworkspace -resource "bad" "my-rule" { - -} -`, "testworkspace") - assert.Len(t, results.GetFailed(), 1) -} - func Test_TrivyIgnoreWithAliasCodeStillIgnored(t *testing.T) { reg := rules.Register(exampleRule) defer rules.Deregister(reg) diff --git a/pkg/iac/scanners/terraform/module_test.go b/pkg/iac/scanners/terraform/module_test.go index ffed34718156..61b1a0e359f6 100644 --- a/pkg/iac/scanners/terraform/module_test.go +++ b/pkg/iac/scanners/terraform/module_test.go @@ -13,7 +13,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scan" "github.com/aquasecurity/trivy/pkg/iac/scanners/options" "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/executor" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" "github.com/aquasecurity/trivy/pkg/iac/severity" "github.com/aquasecurity/trivy/pkg/iac/terraform" "github.com/stretchr/testify/require" @@ -88,13 +88,15 @@ resource "problem" "uhoh" { debug := bytes.NewBuffer([]byte{}) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true), options.ParserWithDebug(debug)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true), options.ParserWithDebug(debug)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, err := executor.New().Execute(modules) + + results, err := executor.New().Execute(modules) require.NoError(t, err) + testutil.AssertRuleFound(t, badRule.LongID(), results, "") if t.Failed() { fmt.Println(debug.String()) @@ -119,12 +121,15 @@ resource "problem" "uhoh" { `}, ) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, _ := executor.New().Execute(modules) + + results, err := executor.New().Execute(modules) + require.NoError(t, err) + testutil.AssertRuleFound(t, badRule.LongID(), results, "") } @@ -148,12 +153,15 @@ resource "problem" "uhoh" { `}, ) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, _ := executor.New().Execute(modules) + + results, err := executor.New().Execute(modules) + require.NoError(t, err) + testutil.AssertRuleNotFound(t, badRule.LongID(), results, "") } @@ -175,12 +183,15 @@ resource "problem" "uhoh" { } `}) - p := parser2.New(fs, "", 
parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, _ := executor.New().Execute(modules) + + results, err := executor.New().Execute(modules) + require.NoError(t, err) + testutil.AssertRuleFound(t, badRule.LongID(), results, "") } @@ -202,12 +213,15 @@ resource "problem" "uhoh" { } `}) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, _ := executor.New().Execute(modules) + + results, err := executor.New().Execute(modules) + require.NoError(t, err) + testutil.AssertRuleFound(t, badRule.LongID(), results, "") } @@ -238,12 +252,15 @@ resource "problem" "uhoh" { } `}) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, _ := executor.New().Execute(modules) + + results, err := executor.New().Execute(modules) + require.NoError(t, err) + testutil.AssertRuleFound(t, badRule.LongID(), results, "") } @@ -276,12 +293,15 @@ resource "problem" "uhoh" { `, }) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true), options.ParserWithDebug(os.Stderr)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true), options.ParserWithDebug(os.Stderr)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, _ := executor.New().Execute(modules) + + results, err := executor.New().Execute(modules) + require.NoError(t, err) + testutil.AssertRuleFound(t, badRule.LongID(), results, "") } @@ -331,12 +351,15 @@ resource "problem" "uhoh" { `, }) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, _ := executor.New().Execute(modules) + + results, err := executor.New().Execute(modules) + require.NoError(t, err) + testutil.AssertRuleFound(t, badRule.LongID(), results, "") } @@ -380,12 +403,15 @@ resource "problem" "uhoh" { `, }) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, _ := executor.New().Execute(modules) + + results, err := executor.New().Execute(modules) + require.NoError(t, err) + testutil.AssertRuleFound(t, badRule.LongID(), results, "") } @@ -418,12 +444,15 @@ resource "problem" "uhoh" { `, }) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, _ := executor.New().Execute(modules) + + results, err := executor.New().Execute(modules) + require.NoError(t, err) + testutil.AssertRuleFound(t, 
badRule.LongID(), results, "") } @@ -473,12 +502,15 @@ resource "problem" "uhoh" { `, }) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, _ := executor.New().Execute(modules) + + results, err := executor.New().Execute(modules) + require.NoError(t, err) + testutil.AssertRuleFound(t, badRule.LongID(), results, "") } @@ -523,12 +555,15 @@ resource "bad" "thing" { reg := rules.Register(r1) defer rules.Deregister(reg) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, _ := executor.New().Execute(modules) + + results, err := executor.New().Execute(modules) + require.NoError(t, err) + testutil.AssertRuleFound(t, r1.LongID(), results, "") } @@ -572,12 +607,15 @@ resource "bad" "thing" { reg := rules.Register(r1) defer rules.Deregister(reg) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, _ := executor.New().Execute(modules) + + results, err := executor.New().Execute(modules) + require.NoError(t, err) + testutil.AssertRuleNotFound(t, r1.LongID(), results, "") } @@ -621,12 +659,15 @@ data "aws_iam_policy_document" "policy" { } `}) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, _ := executor.New().Execute(modules) + + results, err := executor.New().Execute(modules) + require.NoError(t, err) + testutil.AssertRuleNotFound(t, iam.CheckEnforceGroupMFA.LongID(), results, "") } diff --git a/pkg/iac/scanners/terraform/options.go b/pkg/iac/scanners/terraform/options.go index 2dddb856c049..f5a0d2223534 100644 --- a/pkg/iac/scanners/terraform/options.go +++ b/pkg/iac/scanners/terraform/options.go @@ -8,8 +8,6 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/options" "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/executor" "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" - "github.com/aquasecurity/trivy/pkg/iac/severity" - "github.com/aquasecurity/trivy/pkg/iac/state" ) type ConfigurableTerraformScanner interface { @@ -27,62 +25,6 @@ func ScannerWithTFVarsPaths(paths ...string) options.ScannerOption { } } -func ScannerWithAlternativeIDProvider(f func(string) []string) options.ScannerOption { - return func(s options.ConfigurableScanner) { - if tf, ok := s.(ConfigurableTerraformScanner); ok { - tf.AddExecutorOptions(executor.OptionWithAlternativeIDProvider(f)) - } - } -} - -func ScannerWithSeverityOverrides(overrides map[string]string) options.ScannerOption { - return func(s options.ConfigurableScanner) { - if tf, ok := s.(ConfigurableTerraformScanner); ok { - tf.AddExecutorOptions(executor.OptionWithSeverityOverrides(overrides)) - } - } -} - -func ScannerWithNoIgnores() options.ScannerOption { - return func(s options.ConfigurableScanner) { - if tf, ok := 
s.(ConfigurableTerraformScanner); ok { - tf.AddExecutorOptions(executor.OptionNoIgnores()) - } - } -} - -func ScannerWithExcludedRules(ruleIDs []string) options.ScannerOption { - return func(s options.ConfigurableScanner) { - if tf, ok := s.(ConfigurableTerraformScanner); ok { - tf.AddExecutorOptions(executor.OptionExcludeRules(ruleIDs)) - } - } -} - -func ScannerWithExcludeIgnores(ruleIDs []string) options.ScannerOption { - return func(s options.ConfigurableScanner) { - if tf, ok := s.(ConfigurableTerraformScanner); ok { - tf.AddExecutorOptions(executor.OptionExcludeIgnores(ruleIDs)) - } - } -} - -func ScannerWithIncludedRules(ruleIDs []string) options.ScannerOption { - return func(s options.ConfigurableScanner) { - if tf, ok := s.(ConfigurableTerraformScanner); ok { - tf.AddExecutorOptions(executor.OptionIncludeRules(ruleIDs)) - } - } -} - -func ScannerWithStopOnRuleErrors(stop bool) options.ScannerOption { - return func(s options.ConfigurableScanner) { - if tf, ok := s.(ConfigurableTerraformScanner); ok { - tf.AddExecutorOptions(executor.OptionStopOnErrors(stop)) - } - } -} - func ScannerWithWorkspaceName(name string) options.ScannerOption { return func(s options.ConfigurableScanner) { if tf, ok := s.(ConfigurableTerraformScanner); ok { @@ -92,14 +34,6 @@ func ScannerWithWorkspaceName(name string) options.ScannerOption { } } -func ScannerWithSingleThread(single bool) options.ScannerOption { - return func(s options.ConfigurableScanner) { - if tf, ok := s.(ConfigurableTerraformScanner); ok { - tf.AddExecutorOptions(executor.OptionWithSingleThread(single)) - } - } -} - func ScannerWithAllDirectories(all bool) options.ScannerOption { return func(s options.ConfigurableScanner) { if tf, ok := s.(ConfigurableTerraformScanner); ok { @@ -108,14 +42,6 @@ func ScannerWithAllDirectories(all bool) options.ScannerOption { } } -func ScannerWithStopOnHCLError(stop bool) options.ScannerOption { - return func(s options.ConfigurableScanner) { - if tf, ok := s.(ConfigurableTerraformScanner); ok { - tf.AddParserOptions(parser.OptionStopOnHCLError(stop)) - } - } -} - func ScannerWithSkipDownloaded(skip bool) options.ScannerOption { return func(s options.ConfigurableScanner) { if !skip { @@ -125,10 +51,7 @@ func ScannerWithSkipDownloaded(skip bool) options.ScannerOption { tf.AddExecutorOptions(executor.OptionWithResultsFilter(func(results scan.Results) scan.Results { for i, result := range results { prefix := result.Range().GetSourcePrefix() - switch { - case prefix == "": - case strings.HasPrefix(prefix, "."): - default: + if prefix != "" && !strings.HasPrefix(prefix, ".") { results[i].OverrideStatus(scan.StatusIgnored) } } @@ -138,53 +61,6 @@ func ScannerWithSkipDownloaded(skip bool) options.ScannerOption { } } -func ScannerWithResultsFilter(f func(scan.Results) scan.Results) options.ScannerOption { - return func(s options.ConfigurableScanner) { - if tf, ok := s.(ConfigurableTerraformScanner); ok { - tf.AddExecutorOptions(executor.OptionWithResultsFilter(f)) - } - } -} - -func ScannerWithMinimumSeverity(minimum severity.Severity) options.ScannerOption { - min := severityAsOrdinal(minimum) - return func(s options.ConfigurableScanner) { - if tf, ok := s.(ConfigurableTerraformScanner); ok { - tf.AddExecutorOptions(executor.OptionWithResultsFilter(func(results scan.Results) scan.Results { - for i, result := range results { - if severityAsOrdinal(result.Severity()) < min { - results[i].OverrideStatus(scan.StatusIgnored) - } - } - return results - })) - } - } -} - -func severityAsOrdinal(sev severity.Severity) 
int { - switch sev { - case severity.Critical: - return 4 - case severity.High: - return 3 - case severity.Medium: - return 2 - case severity.Low: - return 1 - default: - return 0 - } -} - -func ScannerWithStateFunc(f ...func(*state.State)) options.ScannerOption { - return func(s options.ConfigurableScanner) { - if tf, ok := s.(ConfigurableTerraformScanner); ok { - tf.AddExecutorOptions(executor.OptionWithStateFunc(f...)) - } - } -} - func ScannerWithDownloadsAllowed(allowed bool) options.ScannerOption { return func(s options.ConfigurableScanner) { if tf, ok := s.(ConfigurableTerraformScanner); ok { diff --git a/pkg/iac/scanners/terraform/parser/evaluator.go b/pkg/iac/scanners/terraform/parser/evaluator.go index 1fe9a72fdcac..b93104f442cc 100644 --- a/pkg/iac/scanners/terraform/parser/evaluator.go +++ b/pkg/iac/scanners/terraform/parser/evaluator.go @@ -5,15 +5,16 @@ import ( "errors" "io/fs" "reflect" - "time" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/ext/typeexpr" + "github.com/samber/lo" "github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty/convert" "golang.org/x/exp/slices" "github.com/aquasecurity/trivy/pkg/iac/debug" + "github.com/aquasecurity/trivy/pkg/iac/ignore" "github.com/aquasecurity/trivy/pkg/iac/terraform" tfcontext "github.com/aquasecurity/trivy/pkg/iac/terraform/context" "github.com/aquasecurity/trivy/pkg/iac/types" @@ -32,7 +33,7 @@ type evaluator struct { projectRootPath string // root of the current scan modulePath string moduleName string - ignores terraform.Ignores + ignores ignore.Rules parentParser *Parser debug debug.Logger allowDownloads bool @@ -50,7 +51,7 @@ func newEvaluator( inputVars map[string]cty.Value, moduleMetadata *modulesMetadata, workspace string, - ignores []terraform.Ignore, + ignores ignore.Rules, logger debug.Logger, allowDownloads bool, skipCachedModules bool, @@ -102,6 +103,7 @@ func (e *evaluator) evaluateStep() { e.ctx.Set(e.getValuesByBlockType("data"), "data") e.ctx.Set(e.getValuesByBlockType("output"), "output") + e.ctx.Set(e.getValuesByBlockType("module"), "module") } // exportOutputs is used to export module outputs to the parent module @@ -118,7 +120,7 @@ func (e *evaluator) exportOutputs() cty.Value { return cty.ObjectVal(data) } -func (e *evaluator) EvaluateAll(ctx context.Context) (terraform.Modules, map[string]fs.FS, time.Duration) { +func (e *evaluator) EvaluateAll(ctx context.Context) (terraform.Modules, map[string]fs.FS) { fsKey := types.CreateFSKey(e.filesystem) e.debug.Log("Filesystem key is '%s'", fsKey) @@ -126,53 +128,100 @@ func (e *evaluator) EvaluateAll(ctx context.Context) (terraform.Modules, map[str fsMap := make(map[string]fs.FS) fsMap[fsKey] = e.filesystem - var parseDuration time.Duration - - var lastContext hcl.EvalContext - start := time.Now() e.debug.Log("Starting module evaluation...") - for i := 0; i < maxContextIterations; i++ { + e.evaluateSteps() - e.evaluateStep() + // expand out resources and modules via count, for-each and dynamic + // (not a typo, we do this twice so every order is processed) + e.blocks = e.expandBlocks(e.blocks) + e.blocks = e.expandBlocks(e.blocks) - // if ctx matches the last evaluation, we can bail, nothing left to resolve - if i > 0 && reflect.DeepEqual(lastContext.Variables, e.ctx.Inner().Variables) { - break - } + e.debug.Log("Starting submodule evaluation...") + submodules := e.loadSubmodules(ctx) - if len(e.ctx.Inner().Variables) != len(lastContext.Variables) { - lastContext.Variables = make(map[string]cty.Value, len(e.ctx.Inner().Variables)) + for i := 0; 
i < maxContextIterations; i++ { + changed := false + for _, sm := range submodules { + changed = changed || e.evaluateSubmodule(ctx, sm) } - for k, v := range e.ctx.Inner().Variables { - lastContext.Variables[k] = v + if !changed { + e.debug.Log("All submodules are evaluated at i=%d", i) + break } } - // expand out resources and modules via count, for-each and dynamic - // (not a typo, we do this twice so every order is processed) - e.blocks = e.expandBlocks(e.blocks) - e.blocks = e.expandBlocks(e.blocks) - - parseDuration += time.Since(start) + e.debug.Log("Starting post-submodule evaluation...") + e.evaluateSteps() - e.debug.Log("Starting submodule evaluation...") var modules terraform.Modules + for _, sm := range submodules { + modules = append(modules, sm.modules...) + fsMap = lo.Assign(fsMap, sm.fsMap) + } + + e.debug.Log("Finished processing %d submodule(s).", len(modules)) + + e.debug.Log("Module evaluation complete.") + rootModule := terraform.NewModule(e.projectRootPath, e.modulePath, e.blocks, e.ignores) + return append(terraform.Modules{rootModule}, modules...), fsMap +} + +type submodule struct { + definition *ModuleDefinition + eval *evaluator + modules terraform.Modules + lastState map[string]cty.Value + fsMap map[string]fs.FS +} + +func (e *evaluator) loadSubmodules(ctx context.Context) []*submodule { + var submodules []*submodule + for _, definition := range e.loadModules(ctx) { - submodules, outputs, err := definition.Parser.EvaluateAll(ctx) - if err != nil { - e.debug.Log("Failed to evaluate submodule '%s': %s.", definition.Name, err) + eval, err := definition.Parser.Load(ctx) + if errors.Is(err, ErrNoFiles) { + continue + } else if err != nil { + e.debug.Log("Failed to load submodule '%s': %s.", definition.Name, err) continue } - // export module outputs - e.ctx.Set(outputs, "module", definition.Name) - modules = append(modules, submodules...) - for key, val := range definition.Parser.GetFilesystemMap() { - fsMap[key] = val + + submodules = append(submodules, &submodule{ + definition: definition, + eval: eval, + fsMap: make(map[string]fs.FS), + }) + } + + return submodules +} + +func (e *evaluator) evaluateSubmodule(ctx context.Context, sm *submodule) bool { + inputVars := sm.definition.inputVars() + if len(sm.modules) > 0 { + if reflect.DeepEqual(inputVars, sm.lastState) { + e.debug.Log("Submodule %s inputs unchanged", sm.definition.Name) + return false } } - e.debug.Log("Finished processing %d submodule(s).", len(modules)) - e.debug.Log("Starting post-submodule evaluation...") + e.debug.Log("Evaluating submodule %s", sm.definition.Name) + sm.eval.inputVars = inputVars + sm.modules, sm.fsMap = sm.eval.EvaluateAll(ctx) + outputs := sm.eval.exportOutputs() + + // lastState needs to be captured after applying outputs – so that they + // don't get treated as changes – but before running post-submodule + // evaluation, so that changes from that can trigger re-evaluations of + // the submodule if/when they feed back into inputs. 
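+	// Concretely, the order below is: publish the submodule outputs into the
+	// parent context, snapshot the resulting input values as lastState, then
+	// re-run the parent's own evaluation steps. The loop in EvaluateAll keeps
+	// calling evaluateSubmodule until no submodule reports a change, and that
+	// snapshot is how a settled set of inputs is detected on the next pass.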
+ e.ctx.Set(outputs, "module", sm.definition.Name) + sm.lastState = sm.definition.inputVars() + e.evaluateSteps() + return true +} + +func (e *evaluator) evaluateSteps() { + var lastContext hcl.EvalContext for i := 0; i < maxContextIterations; i++ { e.evaluateStep() @@ -181,7 +230,6 @@ func (e *evaluator) EvaluateAll(ctx context.Context) (terraform.Modules, map[str if i > 0 && reflect.DeepEqual(lastContext.Variables, e.ctx.Inner().Variables) { break } - if len(e.ctx.Inner().Variables) != len(lastContext.Variables) { lastContext.Variables = make(map[string]cty.Value, len(e.ctx.Inner().Variables)) } @@ -189,11 +237,6 @@ func (e *evaluator) EvaluateAll(ctx context.Context) (terraform.Modules, map[str lastContext.Variables[k] = v } } - - e.debug.Log("Module evaluation complete.") - parseDuration += time.Since(start) - rootModule := terraform.NewModule(e.projectRootPath, e.modulePath, e.blocks, e.ignores) - return append(terraform.Modules{rootModule}, modules...), fsMap, parseDuration } func (e *evaluator) expandBlocks(blocks terraform.Blocks) terraform.Blocks { @@ -223,7 +266,9 @@ func (e *evaluator) expandDynamicBlock(b *terraform.Block) { b.InjectBlock(content, blockName) } } - sub.MarkExpanded() + if len(expanded) > 0 { + sub.MarkExpanded() + } } } @@ -252,6 +297,10 @@ func (e *evaluator) expandBlockForEaches(blocks terraform.Blocks, isDynamic bool clones := make(map[string]cty.Value) _ = forEachAttr.Each(func(key cty.Value, val cty.Value) { + if val.IsNull() { + return + } + // instances are identified by a map key (or set member) from the value provided to for_each idx, err := convert.Convert(key, cty.String) if err != nil { diff --git a/pkg/iac/scanners/terraform/parser/load_blocks.go b/pkg/iac/scanners/terraform/parser/load_blocks.go deleted file mode 100644 index c5409d42f27b..000000000000 --- a/pkg/iac/scanners/terraform/parser/load_blocks.go +++ /dev/null @@ -1,131 +0,0 @@ -package parser - -import ( - "fmt" - "regexp" - "strings" - "time" - - "github.com/hashicorp/hcl/v2" - - "github.com/aquasecurity/trivy/pkg/iac/terraform" - "github.com/aquasecurity/trivy/pkg/iac/types" -) - -func loadBlocksFromFile(file sourceFile, moduleSource string) (hcl.Blocks, []terraform.Ignore, error) { - ignores := parseIgnores(file.file.Bytes, file.path, moduleSource) - contents, diagnostics := file.file.Body.Content(terraform.Schema) - if diagnostics != nil && diagnostics.HasErrors() { - return nil, nil, diagnostics - } - if contents == nil { - return nil, nil, nil - } - return contents.Blocks, ignores, nil -} - -func parseIgnores(data []byte, path, moduleSource string) []terraform.Ignore { - var ignores []terraform.Ignore - for i, line := range strings.Split(string(data), "\n") { - line = strings.TrimSpace(line) - lineIgnores := parseIgnoresFromLine(line) - for _, lineIgnore := range lineIgnores { - lineIgnore.Range = types.NewRange(path, i+1, i+1, moduleSource, nil) - ignores = append(ignores, lineIgnore) - } - } - for a, ignoreA := range ignores { - if !ignoreA.Block { - continue - } - for _, ignoreB := range ignores { - if !ignoreB.Block { - continue - } - if ignoreA.Range.GetStartLine()+1 == ignoreB.Range.GetStartLine() { - ignoreA.Range = ignoreB.Range - ignores[a] = ignoreA - } - } - } - return ignores - -} - -var commentPattern = regexp.MustCompile(`^\s*([/]+|/\*|#)+\s*tfsec:`) -var trivyCommentPattern = regexp.MustCompile(`^\s*([/]+|/\*|#)+\s*trivy:`) - -func parseIgnoresFromLine(input string) []terraform.Ignore { - - var ignores []terraform.Ignore - - input = 
commentPattern.ReplaceAllString(input, "tfsec:") - input = trivyCommentPattern.ReplaceAllString(input, "trivy:") - - bits := strings.Split(strings.TrimSpace(input), " ") - for i, bit := range bits { - bit := strings.TrimSpace(bit) - bit = strings.TrimPrefix(bit, "#") - bit = strings.TrimPrefix(bit, "//") - bit = strings.TrimPrefix(bit, "/*") - - if strings.HasPrefix(bit, "tfsec:") || strings.HasPrefix(bit, "trivy:") { - ignore, err := parseIgnoreFromComment(bit) - if err != nil { - continue - } - ignore.Block = i == 0 - ignores = append(ignores, *ignore) - } - } - - return ignores -} - -func parseIgnoreFromComment(input string) (*terraform.Ignore, error) { - var ignore terraform.Ignore - if !strings.HasPrefix(input, "tfsec:") && !strings.HasPrefix(input, "trivy:") { - return nil, fmt.Errorf("invalid ignore") - } - - input = input[6:] - - segments := strings.Split(input, ":") - - for i := 0; i < len(segments)-1; i += 2 { - key := segments[i] - val := segments[i+1] - switch key { - case "ignore": - ignore.RuleID, ignore.Params = parseIDWithParams(val) - case "exp": - parsed, err := time.Parse("2006-01-02", val) - if err != nil { - return &ignore, err - } - ignore.Expiry = &parsed - case "ws": - ignore.Workspace = val - } - } - - return &ignore, nil -} - -func parseIDWithParams(input string) (string, map[string]string) { - params := make(map[string]string) - if !strings.Contains(input, "[") { - return input, params - } - parts := strings.Split(input, "[") - id := parts[0] - paramStr := strings.TrimSuffix(parts[1], "]") - for _, pair := range strings.Split(paramStr, ",") { - parts := strings.Split(pair, "=") - if len(parts) != 2 { - continue - } - params[parts[0]] = parts[1] - } - return id, params -} diff --git a/pkg/iac/scanners/terraform/parser/load_blocks_test.go b/pkg/iac/scanners/terraform/parser/load_blocks_test.go deleted file mode 100644 index e32d19a75044..000000000000 --- a/pkg/iac/scanners/terraform/parser/load_blocks_test.go +++ /dev/null @@ -1,13 +0,0 @@ -package parser - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestParsingDoubleComment(t *testing.T) { - ignores := parseIgnoresFromLine("## tfsec:ignore:abc") - assert.Equal(t, 1, len(ignores)) - assert.Truef(t, ignores[0].Block, "Expected ignore to be a block") -} diff --git a/pkg/iac/scanners/terraform/parser/load_module.go b/pkg/iac/scanners/terraform/parser/load_module.go index 461d7a7a1a56..0bd6a6395936 100644 --- a/pkg/iac/scanners/terraform/parser/load_module.go +++ b/pkg/iac/scanners/terraform/parser/load_module.go @@ -22,6 +22,14 @@ type ModuleDefinition struct { External bool } +func (d *ModuleDefinition) inputVars() map[string]cty.Value { + inputs := d.Definition.Values().AsValueMap() + if inputs == nil { + return make(map[string]cty.Value) + } + return inputs +} + // loadModules reads all module blocks and loads them func (e *evaluator) loadModules(ctx context.Context) []*ModuleDefinition { var moduleDefinitions []*ModuleDefinition diff --git a/pkg/iac/scanners/terraform/parser/parser.go b/pkg/iac/scanners/terraform/parser/parser.go index e09e9e621ef4..b5b50dc913d7 100644 --- a/pkg/iac/scanners/terraform/parser/parser.go +++ b/pkg/iac/scanners/terraform/parser/parser.go @@ -2,6 +2,7 @@ package parser import ( "context" + "errors" "io" "io/fs" "os" @@ -9,7 +10,6 @@ import ( "path/filepath" "sort" "strings" - "time" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hclparse" @@ -17,6 +17,7 @@ import ( "github.com/aquasecurity/trivy/pkg/extrafs" 
"github.com/aquasecurity/trivy/pkg/iac/debug" + "github.com/aquasecurity/trivy/pkg/iac/ignore" "github.com/aquasecurity/trivy/pkg/iac/scanners/options" "github.com/aquasecurity/trivy/pkg/iac/terraform" tfcontext "github.com/aquasecurity/trivy/pkg/iac/terraform/context" @@ -27,19 +28,6 @@ type sourceFile struct { path string } -type Metrics struct { - Timings struct { - DiskIODuration time.Duration - ParseDuration time.Duration - } - Counts struct { - Blocks int - Modules int - ModuleDownloads int - Files int - } -} - var _ ConfigurableTerraformParser = (*Parser)(nil) // Parser is a tool for parsing terraform templates at a given file system location @@ -56,7 +44,6 @@ type Parser struct { workspaceName string underlying *hclparse.Parser children []*Parser - metrics Metrics options []options.ParserOption debug debug.Logger allowDownloads bool @@ -131,21 +118,7 @@ func (p *Parser) newModuleParser(moduleFS fs.FS, moduleSource, modulePath, modul return mp } -func (p *Parser) Metrics() Metrics { - total := p.metrics - for _, child := range p.children { - metrics := child.Metrics() - total.Counts.Files += metrics.Counts.Files - total.Counts.Blocks += metrics.Counts.Blocks - total.Timings.ParseDuration += metrics.Timings.ParseDuration - total.Timings.DiskIODuration += metrics.Timings.DiskIODuration - // NOTE: we don't add module count - this has already propagated to the top level - } - return total -} - func (p *Parser) ParseFile(_ context.Context, fullPath string) error { - diskStart := time.Now() isJSON := strings.HasSuffix(fullPath, ".tf.json") isHCL := strings.HasSuffix(fullPath, ".tf") @@ -164,14 +137,13 @@ func (p *Parser) ParseFile(_ context.Context, fullPath string) error { if err != nil { return err } - p.metrics.Timings.DiskIODuration += time.Since(diskStart) + if dir := path.Dir(fullPath); p.projectRoot == "" { p.debug.Log("Setting project/module root to '%s'", dir) p.projectRoot = dir p.modulePath = dir } - start := time.Now() var file *hcl.File var diag hcl.Diagnostics @@ -187,8 +159,7 @@ func (p *Parser) ParseFile(_ context.Context, fullPath string) error { file: file, path: fullPath, }) - p.metrics.Counts.Files++ - p.metrics.Timings.ParseDuration += time.Since(start) + p.debug.Log("Added file %s.", fullPath) return nil } @@ -254,23 +225,22 @@ func (p *Parser) ParseFS(ctx context.Context, dir string) error { return nil } -func (p *Parser) EvaluateAll(ctx context.Context) (terraform.Modules, cty.Value, error) { +var ErrNoFiles = errors.New("no files found") +func (p *Parser) Load(ctx context.Context) (*evaluator, error) { p.debug.Log("Evaluating module...") if len(p.files) == 0 { p.debug.Log("No files found, nothing to do.") - return nil, cty.NilVal, nil + return nil, ErrNoFiles } blocks, ignores, err := p.readBlocks(p.files) if err != nil { - return nil, cty.NilVal, err + return nil, err } p.debug.Log("Read %d block(s) and %d ignore(s) for module '%s' (%d file[s])...", len(blocks), len(ignores), p.moduleName, len(p.files)) - p.metrics.Counts.Blocks = len(blocks) - var inputVars map[string]cty.Value if p.moduleBlock != nil { inputVars = p.moduleBlock.Values().AsValueMap() @@ -278,7 +248,7 @@ func (p *Parser) EvaluateAll(ctx context.Context) (terraform.Modules, cty.Value, } else { inputVars, err = loadTFVars(p.configsFS, p.tfvarsPaths) if err != nil { - return nil, cty.NilVal, err + return nil, err } p.debug.Log("Added %d variables from tfvars.", len(inputVars)) } @@ -292,10 +262,10 @@ func (p *Parser) EvaluateAll(ctx context.Context) (terraform.Modules, cty.Value, workingDir, err := 
os.Getwd() if err != nil { - return nil, cty.NilVal, err + return nil, err } p.debug.Log("Working directory for module evaluation is '%s'", workingDir) - evaluator := newEvaluator( + return newEvaluator( p.moduleFS, p, p.projectRoot, @@ -310,13 +280,19 @@ func (p *Parser) EvaluateAll(ctx context.Context) (terraform.Modules, cty.Value, p.debug.Extend("evaluator"), p.allowDownloads, p.skipCachedModules, - ) - modules, fsMap, parseDuration := evaluator.EvaluateAll(ctx) - p.metrics.Counts.Modules = len(modules) - p.metrics.Timings.ParseDuration = parseDuration + ), nil +} + +func (p *Parser) EvaluateAll(ctx context.Context) (terraform.Modules, cty.Value, error) { + + e, err := p.Load(ctx) + if errors.Is(err, ErrNoFiles) { + return nil, cty.NilVal, nil + } + modules, fsMap := e.EvaluateAll(ctx) p.debug.Log("Finished parsing module '%s'.", p.moduleName) p.fsMap = fsMap - return modules, evaluator.exportOutputs(), nil + return modules, e.exportOutputs(), nil } func (p *Parser) GetFilesystemMap() map[string]fs.FS { @@ -326,12 +302,12 @@ func (p *Parser) GetFilesystemMap() map[string]fs.FS { return p.fsMap } -func (p *Parser) readBlocks(files []sourceFile) (terraform.Blocks, terraform.Ignores, error) { +func (p *Parser) readBlocks(files []sourceFile) (terraform.Blocks, ignore.Rules, error) { var blocks terraform.Blocks - var ignores terraform.Ignores + var ignores ignore.Rules moduleCtx := tfcontext.NewContext(&hcl.EvalContext{}, nil) for _, file := range files { - fileBlocks, fileIgnores, err := loadBlocksFromFile(file, p.moduleSource) + fileBlocks, err := loadBlocksFromFile(file) if err != nil { if p.stopOnHCLError { return nil, nil, err @@ -342,9 +318,61 @@ func (p *Parser) readBlocks(files []sourceFile) (terraform.Blocks, terraform.Ign for _, fileBlock := range fileBlocks { blocks = append(blocks, terraform.NewBlock(fileBlock, moduleCtx, p.moduleBlock, nil, p.moduleSource, p.moduleFS)) } + fileIgnores := ignore.Parse( + string(file.file.Bytes), + file.path, + &ignore.StringMatchParser{ + SectionKey: "ws", + }, + ¶mParser{}, + ) ignores = append(ignores, fileIgnores...) 
} sortBlocksByHierarchy(blocks) return blocks, ignores, nil } + +func loadBlocksFromFile(file sourceFile) (hcl.Blocks, error) { + contents, diagnostics := file.file.Body.Content(terraform.Schema) + if diagnostics != nil && diagnostics.HasErrors() { + return nil, diagnostics + } + if contents == nil { + return nil, nil + } + return contents.Blocks, nil +} + +type paramParser struct { + params map[string]string +} + +func (s *paramParser) Key() string { + return "ignore" +} + +func (s *paramParser) Parse(str string) bool { + s.params = make(map[string]string) + + idx := strings.Index(str, "[") + if idx == -1 { + return false + } + + str = str[idx+1:] + + paramStr := strings.TrimSuffix(str, "]") + for _, pair := range strings.Split(paramStr, ",") { + parts := strings.Split(pair, "=") + if len(parts) != 2 { + continue + } + s.params[parts[0]] = parts[1] + } + return true +} + +func (s *paramParser) Param() any { + return s.params +} diff --git a/pkg/iac/scanners/terraform/parser/parser_test.go b/pkg/iac/scanners/terraform/parser/parser_test.go index 12594841251b..a20bb2a84b58 100644 --- a/pkg/iac/scanners/terraform/parser/parser_test.go +++ b/pkg/iac/scanners/terraform/parser/parser_test.go @@ -1522,3 +1522,112 @@ func compareSets(a []int, b []int) bool { return true } + +func TestModuleRefersToOutputOfAnotherModule(t *testing.T) { + files := map[string]string{ + "main.tf": ` +module "module2" { + source = "./modules/foo" +} + +module "module1" { + source = "./modules/bar" + test_var = module.module2.test_out +} +`, + "modules/foo/main.tf": ` +output "test_out" { + value = "test_value" +} +`, + "modules/bar/main.tf": ` +variable "test_var" {} + +resource "test_resource" "this" { + dynamic "dynamic_block" { + for_each = [var.test_var] + content { + some_attr = dynamic_block.value + } + } +} +`, + } + + modules := parse(t, files) + require.Len(t, modules, 3) + + resources := modules.GetResourcesByType("test_resource") + require.Len(t, resources, 1) + + attr, _ := resources[0].GetNestedAttribute("dynamic_block.some_attr") + require.NotNil(t, attr) + + assert.Equal(t, "test_value", attr.GetRawValue()) +} + +func TestCyclicModules(t *testing.T) { + files := map[string]string{ + "main.tf": ` +module "module2" { + source = "./modules/foo" + test_var = passthru.handover.from_1 +} + +// Demonstrates need for evaluateSteps between submodule evaluations. 
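+// Both modules feed this resource and the resource feeds both modules, so the values only settle after several evaluation passes.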
+resource "passthru" "handover" { + from_1 = module.module1.test_out + from_2 = module.module2.test_out +} + +module "module1" { + source = "./modules/bar" + test_var = passthru.handover.from_2 +} +`, + "modules/foo/main.tf": ` +variable "test_var" {} + +resource "test_resource" "this" { + dynamic "dynamic_block" { + for_each = [var.test_var] + content { + some_attr = dynamic_block.value + } + } +} + +output "test_out" { + value = "test_value" +} +`, + "modules/bar/main.tf": ` +variable "test_var" {} + +resource "test_resource" "this" { + dynamic "dynamic_block" { + for_each = [var.test_var] + content { + some_attr = dynamic_block.value + } + } +} + +output "test_out" { + value = test_resource.this.dynamic_block.some_attr +} +`, + } + + modules := parse(t, files) + require.Len(t, modules, 3) + + resources := modules.GetResourcesByType("test_resource") + require.Len(t, resources, 2) + + for _, res := range resources { + attr, _ := res.GetNestedAttribute("dynamic_block.some_attr") + require.NotNil(t, attr, res.FullName()) + assert.Equal(t, "test_value", attr.GetRawValue()) + } +} diff --git a/pkg/iac/scanners/terraform/performance_test.go b/pkg/iac/scanners/terraform/performance_test.go index f4a390a3b2cc..9015aa25b076 100644 --- a/pkg/iac/scanners/terraform/performance_test.go +++ b/pkg/iac/scanners/terraform/performance_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/testutil" "github.com/aquasecurity/trivy/pkg/iac/rules" "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/executor" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" ) func BenchmarkCalculate(b *testing.B) { @@ -21,7 +21,7 @@ func BenchmarkCalculate(b *testing.B) { b.ResetTimer() for i := 0; i < b.N; i++ { - p := parser2.New(f, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(f, "", parser.OptionStopOnHCLError(true)) if err := p.ParseFS(context.TODO(), "project"); err != nil { b.Fatal(err) } @@ -29,7 +29,7 @@ func BenchmarkCalculate(b *testing.B) { if err != nil { b.Fatal(err) } - _, _, _ = executor.New().Execute(modules) + executor.New().Execute(modules) } } diff --git a/pkg/iac/scanners/terraform/scanner.go b/pkg/iac/scanners/terraform/scanner.go index 5176b6471355..f5a3554d002d 100644 --- a/pkg/iac/scanners/terraform/scanner.go +++ b/pkg/iac/scanners/terraform/scanner.go @@ -10,7 +10,6 @@ import ( "sort" "strings" "sync" - "time" "github.com/aquasecurity/trivy/pkg/extrafs" "github.com/aquasecurity/trivy/pkg/iac/debug" @@ -21,7 +20,6 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/options" "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/executor" "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" - "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser/resolvers" "github.com/aquasecurity/trivy/pkg/iac/terraform" "github.com/aquasecurity/trivy/pkg/iac/types" ) @@ -120,14 +118,6 @@ func (s *Scanner) SetDataFilesystem(_ fs.FS) { } func (s *Scanner) SetRegoErrorLimit(_ int) {} -type Metrics struct { - Parser parser.Metrics - Executor executor.Metrics - Timings struct { - Total time.Duration - } -} - func New(opts ...options.ScannerOption) *Scanner { s := &Scanner{ dirs: make(map[string]struct{}), @@ -139,11 +129,6 @@ func New(opts ...options.ScannerOption) *Scanner { return s } -func (s *Scanner) ScanFS(ctx context.Context, target fs.FS, dir string) (scan.Results, error) { - results, _, err := s.ScanFSWithMetrics(ctx, target, dir) - return results, 
err -} - func (s *Scanner) initRegoScanner(srcFS fs.FS) (*rego.Scanner, error) { s.Lock() defer s.Unlock() @@ -167,8 +152,7 @@ type terraformRootModule struct { fsMap map[string]fs.FS } -func (s *Scanner) ScanFSWithMetrics(ctx context.Context, target fs.FS, dir string) (scan.Results, Metrics, error) { - var metrics Metrics +func (s *Scanner) ScanFS(ctx context.Context, target fs.FS, dir string) (scan.Results, error) { s.debug.Log("Scanning [%s] at '%s'...", target, dir) @@ -178,12 +162,12 @@ func (s *Scanner) ScanFSWithMetrics(ctx context.Context, target fs.FS, dir strin if len(modulePaths) == 0 { s.debug.Log("no modules found") - return nil, metrics, nil + return nil, nil } regoScanner, err := s.initRegoScanner(target) if err != nil { - return nil, metrics, err + return nil, err } s.execLock.Lock() @@ -195,7 +179,7 @@ func (s *Scanner) ScanFSWithMetrics(ctx context.Context, target fs.FS, dir strin p := parser.New(target, "", s.parserOpt...) rootDirs, err := p.FindRootModules(ctx, modulePaths) if err != nil { - return nil, metrics, fmt.Errorf("failed to find root modules: %w", err) + return nil, fmt.Errorf("failed to find root modules: %w", err) } rootModules := make([]terraformRootModule, 0, len(rootDirs)) @@ -208,21 +192,14 @@ func (s *Scanner) ScanFSWithMetrics(ctx context.Context, target fs.FS, dir strin p := parser.New(target, "", s.parserOpt...) if err := p.ParseFS(ctx, dir); err != nil { - return nil, metrics, err + return nil, err } modules, _, err := p.EvaluateAll(ctx) if err != nil { - return nil, metrics, err + return nil, err } - parserMetrics := p.Metrics() - metrics.Parser.Counts.Blocks += parserMetrics.Counts.Blocks - metrics.Parser.Counts.Modules += parserMetrics.Counts.Modules - metrics.Parser.Counts.Files += parserMetrics.Counts.Files - metrics.Parser.Timings.DiskIODuration += parserMetrics.Timings.DiskIODuration - metrics.Parser.Timings.ParseDuration += parserMetrics.Timings.ParseDuration - rootModules = append(rootModules, terraformRootModule{ rootPath: dir, childs: modules, @@ -234,9 +211,9 @@ func (s *Scanner) ScanFSWithMetrics(ctx context.Context, target fs.FS, dir strin s.execLock.RLock() e := executor.New(s.executorOpt...) s.execLock.RUnlock() - results, execMetrics, err := e.Execute(module.childs) + results, err := e.Execute(module.childs) if err != nil { - return nil, metrics, err + return nil, err } for i, result := range results { @@ -256,27 +233,10 @@ func (s *Scanner) ScanFSWithMetrics(ctx context.Context, target fs.FS, dir strin } } - metrics.Executor.Counts.Passed += execMetrics.Counts.Passed - metrics.Executor.Counts.Failed += execMetrics.Counts.Failed - metrics.Executor.Counts.Ignored += execMetrics.Counts.Ignored - metrics.Executor.Counts.Critical += execMetrics.Counts.Critical - metrics.Executor.Counts.High += execMetrics.Counts.High - metrics.Executor.Counts.Medium += execMetrics.Counts.Medium - metrics.Executor.Counts.Low += execMetrics.Counts.Low - metrics.Executor.Timings.Adaptation += execMetrics.Timings.Adaptation - metrics.Executor.Timings.RunningChecks += execMetrics.Timings.RunningChecks - allResults = append(allResults, results...) 
} - metrics.Parser.Counts.ModuleDownloads = resolvers.Remote.GetDownloadCount() - - metrics.Timings.Total += metrics.Parser.Timings.DiskIODuration - metrics.Timings.Total += metrics.Parser.Timings.ParseDuration - metrics.Timings.Total += metrics.Executor.Timings.Adaptation - metrics.Timings.Total += metrics.Executor.Timings.RunningChecks - - return allResults, metrics, nil + return allResults, nil } func (s *Scanner) removeNestedDirs(dirs []string) []string { diff --git a/pkg/iac/scanners/terraform/scanner_test.go b/pkg/iac/scanners/terraform/scanner_test.go index 9e44893e0ff7..047ceb972a2a 100644 --- a/pkg/iac/scanners/terraform/scanner_test.go +++ b/pkg/iac/scanners/terraform/scanner_test.go @@ -13,7 +13,6 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scan" "github.com/aquasecurity/trivy/pkg/iac/scanners/options" "github.com/aquasecurity/trivy/pkg/iac/severity" - "github.com/aquasecurity/trivy/pkg/iac/state" "github.com/aquasecurity/trivy/pkg/iac/terraform" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -63,61 +62,11 @@ func scanWithOptions(t *testing.T, code string, opt ...options.ScannerOption) sc }) scanner := New(opt...) - results, _, err := scanner.ScanFSWithMetrics(context.TODO(), fs, "project") + results, err := scanner.ScanFS(context.TODO(), fs, "project") require.NoError(t, err) return results } -func Test_OptionWithAlternativeIDProvider(t *testing.T) { - reg := rules.Register(alwaysFailRule) - defer rules.Deregister(reg) - - options := []options.ScannerOption{ - ScannerWithAlternativeIDProvider(func(s string) []string { - return []string{"something", "altid", "blah"} - }), - } - results := scanWithOptions(t, ` -//tfsec:ignore:altid -resource "something" "else" {} -`, options...) - require.Len(t, results.GetFailed(), 0) - require.Len(t, results.GetIgnored(), 1) - -} - -func Test_TrivyOptionWithAlternativeIDProvider(t *testing.T) { - reg := rules.Register(alwaysFailRule) - defer rules.Deregister(reg) - - options := []options.ScannerOption{ - ScannerWithAlternativeIDProvider(func(s string) []string { - return []string{"something", "altid", "blah"} - }), - } - results := scanWithOptions(t, ` -//trivy:ignore:altid -resource "something" "else" {} -`, options...) - require.Len(t, results.GetFailed(), 0) - require.Len(t, results.GetIgnored(), 1) - -} - -func Test_OptionWithSeverityOverrides(t *testing.T) { - reg := rules.Register(alwaysFailRule) - defer rules.Deregister(reg) - - options := []options.ScannerOption{ - ScannerWithSeverityOverrides(map[string]string{"aws-service-abc": "LOW"}), - } - results := scanWithOptions(t, ` -resource "something" "else" {} -`, options...) - require.Len(t, results.GetFailed(), 1) - assert.Equal(t, severity.Low, results.GetFailed()[0].Severity()) -} - func Test_OptionWithDebugWriter(t *testing.T) { reg := rules.Register(alwaysFailRule) defer rules.Deregister(reg) @@ -133,67 +82,6 @@ resource "something" "else" {} require.Greater(t, buffer.Len(), 0) } -func Test_OptionNoIgnores(t *testing.T) { - reg := rules.Register(alwaysFailRule) - defer rules.Deregister(reg) - - scannerOpts := []options.ScannerOption{ - ScannerWithNoIgnores(), - } - results := scanWithOptions(t, ` -//tfsec:ignore:aws-service-abc -resource "something" "else" {} -`, scannerOpts...) 
- require.Len(t, results.GetFailed(), 1) - require.Len(t, results.GetIgnored(), 0) - -} - -func Test_OptionExcludeRules(t *testing.T) { - reg := rules.Register(alwaysFailRule) - defer rules.Deregister(reg) - - options := []options.ScannerOption{ - ScannerWithExcludedRules([]string{"aws-service-abc"}), - } - results := scanWithOptions(t, ` -resource "something" "else" {} -`, options...) - require.Len(t, results.GetFailed(), 0) - require.Len(t, results.GetIgnored(), 1) - -} - -func Test_OptionIncludeRules(t *testing.T) { - reg := rules.Register(alwaysFailRule) - defer rules.Deregister(reg) - - scannerOpts := []options.ScannerOption{ - ScannerWithIncludedRules([]string{"this-only"}), - } - results := scanWithOptions(t, ` -resource "something" "else" {} -`, scannerOpts...) - require.Len(t, results.GetFailed(), 0) - require.Len(t, results.GetIgnored(), 1) - -} - -func Test_OptionWithMinimumSeverity(t *testing.T) { - reg := rules.Register(alwaysFailRule) - defer rules.Deregister(reg) - - scannerOpts := []options.ScannerOption{ - ScannerWithMinimumSeverity(severity.Critical), - } - results := scanWithOptions(t, ` -resource "something" "else" {} -`, scannerOpts...) - require.Len(t, results.GetFailed(), 0) - require.Len(t, results.GetIgnored(), 1) - -} - func Test_OptionWithPolicyDirs(t *testing.T) { fs := testutil.CreateFS(t, map[string]string{ @@ -374,7 +262,7 @@ cause := bucket.name options.ScannerWithPolicyNamespaces(test.includedNamespaces...), ) - results, _, err := scanner.ScanFSWithMetrics(context.TODO(), fs, "code") + results, err := scanner.ScanFS(context.TODO(), fs, "code") require.NoError(t, err) var found bool @@ -391,38 +279,6 @@ cause := bucket.name } -func Test_OptionWithStateFunc(t *testing.T) { - - fs := testutil.CreateFS(t, map[string]string{ - "code/main.tf": ` -resource "aws_s3_bucket" "my-bucket" { - bucket = "evil" -} -`, - }) - - var actual state.State - - debugLog := bytes.NewBuffer([]byte{}) - scanner := New( - options.ScannerWithDebug(debugLog), - ScannerWithStateFunc(func(s *state.State) { - require.NotNil(t, s) - actual = *s - }), - ) - - _, _, err := scanner.ScanFSWithMetrics(context.TODO(), fs, "code") - require.NoError(t, err) - - assert.Equal(t, 1, len(actual.AWS.S3.Buckets)) - - if t.Failed() { - fmt.Printf("Debug logs:\n%s\n", debugLog.String()) - } - -} - func Test_OptionWithRegoOnly(t *testing.T) { fs := testutil.CreateFS(t, map[string]string{ @@ -830,62 +686,6 @@ resource "aws_s3_bucket_public_access_block" "testB" { } } -func Test_RegoInput(t *testing.T) { - - var regoInput interface{} - - opts := []options.ScannerOption{ - ScannerWithStateFunc(func(s *state.State) { - regoInput = s.ToRego() - }), - } - _ = scanWithOptions(t, ` -resource "aws_security_group" "example_security_group" { - name = "example_security_group" - - description = "Example SG" - - ingress { - description = "Allow SSH" - from_port = 22 - to_port = 22 - protocol = "tcp" - cidr_blocks = ["1.2.3.4", "5.6.7.8"] - } - -} -`, opts...) 
- - outer, ok := regoInput.(map[string]interface{}) - require.True(t, ok) - aws, ok := outer["aws"].(map[string]interface{}) - require.True(t, ok) - ec2, ok := aws["ec2"].(map[string]interface{}) - require.True(t, ok) - sgs, ok := ec2["securitygroups"].([]interface{}) - require.True(t, ok) - require.Len(t, sgs, 1) - sg0, ok := sgs[0].(map[string]interface{}) - require.True(t, ok) - ingress, ok := sg0["ingressrules"].([]interface{}) - require.True(t, ok) - require.Len(t, ingress, 1) - ingress0, ok := ingress[0].(map[string]interface{}) - require.True(t, ok) - cidrs, ok := ingress0["cidrs"].([]interface{}) - require.True(t, ok) - require.Len(t, cidrs, 2) - - cidr0, ok := cidrs[0].(map[string]interface{}) - require.True(t, ok) - - cidr1, ok := cidrs[1].(map[string]interface{}) - require.True(t, ok) - - assert.Equal(t, "1.2.3.4", cidr0["value"]) - assert.Equal(t, "5.6.7.8", cidr1["value"]) -} - // PoC for replacing Go with Rego: AVD-AWS-0001 func Test_RegoRules(t *testing.T) { diff --git a/pkg/iac/scanners/terraform/setup_test.go b/pkg/iac/scanners/terraform/setup_test.go index c1f1aeb2e8ca..84bf3fdcc338 100644 --- a/pkg/iac/scanners/terraform/setup_test.go +++ b/pkg/iac/scanners/terraform/setup_test.go @@ -7,7 +7,7 @@ import ( "github.com/aquasecurity/trivy/internal/testutil" "github.com/aquasecurity/trivy/pkg/iac/scan" "github.com/aquasecurity/trivy/pkg/iac/scanners/options" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" "github.com/aquasecurity/trivy/pkg/iac/terraform" "github.com/stretchr/testify/require" ) @@ -17,7 +17,7 @@ func createModulesFromSource(t *testing.T, source string, ext string) terraform. "source" + ext: source, }) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) if err := p.ParseFS(context.TODO(), "."); err != nil { t.Fatal(err) } @@ -51,7 +51,7 @@ func scanJSON(t *testing.T, source string) scan.Results { }) s := New(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true)) - results, _, err := s.ScanFSWithMetrics(context.TODO(), fs, ".") + results, err := s.ScanFS(context.TODO(), fs, ".") require.NoError(t, err) return results } diff --git a/pkg/iac/scanners/terraformplan/tfjson/scanner_test.go b/pkg/iac/scanners/terraformplan/tfjson/scanner_test.go index fe37184ebb20..664799f74036 100644 --- a/pkg/iac/scanners/terraformplan/tfjson/scanner_test.go +++ b/pkg/iac/scanners/terraformplan/tfjson/scanner_test.go @@ -13,11 +13,19 @@ import ( "github.com/stretchr/testify/require" ) -func Test_OptionWithPolicyDirs_OldRegoMetadata(t *testing.T) { - b, _ := os.ReadFile("test/testdata/plan.json") - fs := testutil.CreateFS(t, map[string]string{ - "/code/main.tfplan.json": string(b), - "/rules/test.rego": ` +func Test_TerraformScanner(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + inputFile string + inputRego string + options []options.ScannerOption + }{ + { + name: "old rego metadata", + inputFile: "test/testdata/plan.json", + inputRego: ` package defsec.abcdefg __rego_metadata__ := { @@ -43,36 +51,46 @@ deny[cause] { cause := bucket.name } `, - }) - - debugLog := bytes.NewBuffer([]byte{}) - scanner := New( - options.ScannerWithDebug(debugLog), - options.ScannerWithPolicyFilesystem(fs), - options.ScannerWithPolicyDirs("rules"), - options.ScannerWithRegoOnly(true), - options.ScannerWithEmbeddedPolicies(false), - ) - - results, err := 
scanner.ScanFS(context.TODO(), fs, "code") - require.NoError(t, err) - - require.Len(t, results.GetFailed(), 1) - - failure := results.GetFailed()[0] + options: []options.ScannerOption{ + options.ScannerWithPolicyDirs("rules"), + options.ScannerWithRegoOnly(true), + options.ScannerWithEmbeddedPolicies(false)}, + }, + { + name: "with user namespace", + inputFile: "test/testdata/plan.json", + inputRego: ` +# METADATA +# title: Bad buckets are bad +# description: Bad buckets are bad because they are not good. +# scope: package +# schemas: +# - input: schema["input"] +# custom: +# avd_id: AVD-TEST-0123 +# severity: CRITICAL +# short_code: very-bad-misconfig +# recommended_action: "Fix the s3 bucket" - assert.Equal(t, "AVD-TEST-0123", failure.Rule().AVDID) - if t.Failed() { - fmt.Printf("Debug logs:\n%s\n", debugLog.String()) - } +package user.foobar.ABC001 +deny[cause] { + bucket := input.aws.s3.buckets[_] + bucket.name.value == "tfsec-plan-testing" + cause := bucket.name } - -func Test_OptionWithPolicyDirs_WithUserNamespace(t *testing.T) { - b, _ := os.ReadFile("test/testdata/plan.json") - fs := testutil.CreateFS(t, map[string]string{ - "/code/main.tfplan.json": string(b), - "/rules/test.rego": ` +`, + options: []options.ScannerOption{ + options.ScannerWithPolicyDirs("rules"), + options.ScannerWithRegoOnly(true), + options.ScannerWithEmbeddedPolicies(false), + options.ScannerWithPolicyNamespaces("user"), + }, + }, + { + name: "with templated plan json", + inputFile: "test/testdata/plan_with_template.json", + inputRego: ` # METADATA # title: Bad buckets are bad # description: Bad buckets are bad because they are not good. @@ -89,32 +107,43 @@ package user.foobar.ABC001 deny[cause] { bucket := input.aws.s3.buckets[_] - bucket.name.value == "tfsec-plan-testing" + bucket.name.value == "${template-name-is-$evil}" cause := bucket.name } `, - }) + options: []options.ScannerOption{ + options.ScannerWithPolicyDirs("rules"), + options.ScannerWithRegoOnly(true), + options.ScannerWithEmbeddedPolicies(false), + options.ScannerWithPolicyNamespaces("user"), + }, + }, + } - debugLog := bytes.NewBuffer([]byte{}) - scanner := New( - options.ScannerWithDebug(debugLog), - options.ScannerWithPolicyFilesystem(fs), - options.ScannerWithPolicyDirs("rules"), - options.ScannerWithRegoOnly(true), - options.ScannerWithPolicyNamespaces("user"), - options.ScannerWithEmbeddedPolicies(false), - ) + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(t *testing.T) { + b, _ := os.ReadFile(tc.inputFile) + fs := testutil.CreateFS(t, map[string]string{ + "/code/main.tfplan.json": string(b), + "/rules/test.rego": tc.inputRego, + }) - results, err := scanner.ScanFS(context.TODO(), fs, "code") - require.NoError(t, err) + debugLog := bytes.NewBuffer([]byte{}) + so := append(tc.options, options.ScannerWithDebug(debugLog), options.ScannerWithPolicyFilesystem(fs)) + scanner := New(so...) 
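+			// Every case below is expected to yield exactly one failed result with ID AVD-TEST-0123.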
- require.Len(t, results.GetFailed(), 1) + results, err := scanner.ScanFS(context.TODO(), fs, "code") + require.NoError(t, err) - failure := results.GetFailed()[0] + require.Len(t, results.GetFailed(), 1) - assert.Equal(t, "AVD-TEST-0123", failure.Rule().AVDID) - if t.Failed() { - fmt.Printf("Debug logs:\n%s\n", debugLog.String()) - } + failure := results.GetFailed()[0] + assert.Equal(t, "AVD-TEST-0123", failure.Rule().AVDID) + if t.Failed() { + fmt.Printf("Debug logs:\n%s\n", debugLog.String()) + } + }) + } } diff --git a/pkg/iac/scanners/terraformplan/tfjson/test/parser_test.go b/pkg/iac/scanners/terraformplan/tfjson/test/parser_test.go index 4f81dec89751..97b9ba4fcf7b 100644 --- a/pkg/iac/scanners/terraformplan/tfjson/test/parser_test.go +++ b/pkg/iac/scanners/terraformplan/tfjson/test/parser_test.go @@ -9,7 +9,6 @@ import ( ) func Test_Parse_Plan_File(t *testing.T) { - planFile, err := parser.New().ParseFile("testdata/plan.json") require.NoError(t, err) diff --git a/pkg/iac/scanners/terraformplan/tfjson/test/testdata/plan_with_template.json b/pkg/iac/scanners/terraformplan/tfjson/test/testdata/plan_with_template.json new file mode 100644 index 000000000000..2ae6e5c8d7ed --- /dev/null +++ b/pkg/iac/scanners/terraformplan/tfjson/test/testdata/plan_with_template.json @@ -0,0 +1,480 @@ +{ + "format_version": "0.2", + "terraform_version": "1.0.3", + "variables": { + "bucket_name": { + "value": "${template-name-is-$evil}" + } + }, + "planned_values": { + "root_module": { + "resources": [ + { + "address": "aws_s3_bucket.planbucket", + "mode": "managed", + "type": "aws_s3_bucket", + "name": "planbucket", + "provider_name": "registry.terraform.io/hashicorp/aws", + "schema_version": 0, + "values": { + "bucket": "${template-name-is-$evil}", + "bucket_prefix": null, + "force_destroy": false, + "logging": [ + { + "target_bucket": "arn:aws:s3:::iac-tfsec-dev", + "target_prefix": null + } + ], + "tags": null, + "versioning": [ + { + "enabled": true, + "mfa_delete": false + } + ] + }, + "sensitive_values": { + "cors_rule": [], + "grant": [], + "lifecycle_rule": [], + "logging": [ + {} + ], + "object_lock_configuration": [], + "replication_configuration": [], + "server_side_encryption_configuration": [], + "tags_all": {}, + "versioning": [ + {} + ], + "website": [] + } + }, + { + "address": "aws_s3_bucket_server_side_encryption_configuration.example", + "mode": "managed", + "type": "aws_s3_bucket_server_side_encryption_configuration", + "name": "example", + "provider_name": "registry.terraform.io/hashicorp/aws", + "schema_version": 0, + "values": { + "expected_bucket_owner": null, + "rule": [ + { + "apply_server_side_encryption_by_default": [ + { + "kms_master_key_id": "", + "sse_algorithm": "AES256" + } + ], + "bucket_key_enabled": true + } + ] + }, + "sensitive_values": { + "rule": [ + { + "apply_server_side_encryption_by_default": [ + {} + ] + } + ] + } + }, + { + "address": "aws_security_group.sg", + "mode": "managed", + "type": "aws_security_group", + "name": "sg", + "provider_name": "registry.terraform.io/hashicorp/aws", + "schema_version": 1, + "values": { + "description": "Managed by Terraform", + "ingress": [ + { + "cidr_blocks": [ + "0.0.0.0/0" + ], + "description": "", + "from_port": 80, + "ipv6_cidr_blocks": [], + "prefix_list_ids": [], + "protocol": "tcp", + "security_groups": [], + "self": false, + "to_port": 80 + } + ], + "name": "sg", + "revoke_rules_on_delete": false, + "tags": { + "Name": "blah" + }, + "tags_all": { + "Name": "blah" + }, + "timeouts": null + }, + "sensitive_values": { 
+ "egress": [], + "ingress": [ + { + "cidr_blocks": [ + false + ], + "ipv6_cidr_blocks": [], + "prefix_list_ids": [], + "security_groups": [] + } + ], + "tags": {}, + "tags_all": {} + } + } + ] + } + }, + "resource_changes": [ + { + "address": "aws_s3_bucket.planbucket", + "mode": "managed", + "type": "aws_s3_bucket", + "name": "planbucket", + "provider_name": "registry.terraform.io/hashicorp/aws", + "change": { + "actions": [ + "create" + ], + "before": null, + "after": { + "bucket": "${template-name-is-$evil}", + "bucket_prefix": null, + "force_destroy": false, + "logging": [ + { + "target_bucket": "arn:aws:s3:::iac-tfsec-dev", + "target_prefix": null + } + ], + "tags": null, + "versioning": [ + { + "enabled": true, + "mfa_delete": false + } + ] + }, + "after_unknown": { + "acceleration_status": true, + "acl": true, + "arn": true, + "bucket_domain_name": true, + "bucket_regional_domain_name": true, + "cors_rule": true, + "grant": true, + "hosted_zone_id": true, + "id": true, + "lifecycle_rule": true, + "logging": [ + {} + ], + "object_lock_configuration": true, + "object_lock_enabled": true, + "policy": true, + "region": true, + "replication_configuration": true, + "request_payer": true, + "server_side_encryption_configuration": true, + "tags_all": true, + "versioning": [ + {} + ], + "website": true, + "website_domain": true, + "website_endpoint": true + }, + "before_sensitive": false, + "after_sensitive": { + "cors_rule": [], + "grant": [], + "lifecycle_rule": [], + "logging": [ + {} + ], + "object_lock_configuration": [], + "replication_configuration": [], + "server_side_encryption_configuration": [], + "tags_all": {}, + "versioning": [ + {} + ], + "website": [] + } + } + }, + { + "address": "aws_s3_bucket_server_side_encryption_configuration.example", + "mode": "managed", + "type": "aws_s3_bucket_server_side_encryption_configuration", + "name": "example", + "provider_name": "registry.terraform.io/hashicorp/aws", + "change": { + "actions": [ + "create" + ], + "before": null, + "after": { + "expected_bucket_owner": null, + "rule": [ + { + "apply_server_side_encryption_by_default": [ + { + "kms_master_key_id": "", + "sse_algorithm": "AES256" + } + ], + "bucket_key_enabled": true + } + ] + }, + "after_unknown": { + "bucket": true, + "id": true, + "rule": [ + { + "apply_server_side_encryption_by_default": [ + {} + ] + } + ] + }, + "before_sensitive": false, + "after_sensitive": { + "rule": [ + { + "apply_server_side_encryption_by_default": [ + {} + ] + } + ] + } + } + }, + { + "address": "aws_security_group.sg", + "mode": "managed", + "type": "aws_security_group", + "name": "sg", + "provider_name": "registry.terraform.io/hashicorp/aws", + "change": { + "actions": [ + "create" + ], + "before": null, + "after": { + "description": "Managed by Terraform", + "ingress": [ + { + "cidr_blocks": [ + "0.0.0.0/0" + ], + "description": "", + "from_port": 80, + "ipv6_cidr_blocks": [], + "prefix_list_ids": [], + "protocol": "tcp", + "security_groups": [], + "self": false, + "to_port": 80 + } + ], + "name": "sg", + "revoke_rules_on_delete": false, + "tags": { + "Name": "blah" + }, + "tags_all": { + "Name": "blah" + }, + "timeouts": null + }, + "after_unknown": { + "arn": true, + "egress": true, + "id": true, + "ingress": [ + { + "cidr_blocks": [ + false + ], + "ipv6_cidr_blocks": [], + "prefix_list_ids": [], + "security_groups": [] + } + ], + "name_prefix": true, + "owner_id": true, + "tags": {}, + "tags_all": {}, + "vpc_id": true + }, + "before_sensitive": false, + "after_sensitive": { + "egress": [], + 
"ingress": [ + { + "cidr_blocks": [ + false + ], + "ipv6_cidr_blocks": [], + "prefix_list_ids": [], + "security_groups": [] + } + ], + "tags": {}, + "tags_all": {} + } + } + } + ], + "prior_state": { + "format_version": "0.2", + "terraform_version": "1.0.3", + "values": { + "root_module": { + "resources": [ + { + "address": "data.aws_s3_bucket.logging_bucket", + "mode": "data", + "type": "aws_s3_bucket", + "name": "logging_bucket", + "provider_name": "registry.terraform.io/hashicorp/aws", + "schema_version": 0, + "values": { + "arn": "arn:aws:s3:::iac-tfsec-dev", + "bucket": "iac-tfsec-dev", + "bucket_domain_name": "iac-tfsec-dev.s3.amazonaws.com", + "bucket_regional_domain_name": "iac-tfsec-dev.s3.amazonaws.com", + "hosted_zone_id": "Z3AQBSTGFYJSTF", + "id": "iac-tfsec-dev", + "region": "us-east-1", + "website_domain": null, + "website_endpoint": null + }, + "sensitive_values": {} + } + ] + } + } + }, + "configuration": { + "provider_config": { + "aws": { + "name": "aws" + } + }, + "root_module": { + "resources": [ + { + "address": "aws_s3_bucket.planbucket", + "mode": "managed", + "type": "aws_s3_bucket", + "name": "planbucket", + "provider_config_key": "aws", + "expressions": { + "bucket": { + "references": [ + "var.bucket_name" + ] + }, + "logging": [ + { + "target_bucket": { + "references": [ + "data.aws_s3_bucket.logging_bucket.arn", + "data.aws_s3_bucket.logging_bucket" + ] + } + } + ], + "versioning": [ + { + "enabled": { + "constant_value": true + } + } + ] + }, + "schema_version": 0 + }, + { + "address": "aws_s3_bucket_server_side_encryption_configuration.example", + "mode": "managed", + "type": "aws_s3_bucket_server_side_encryption_configuration", + "name": "example", + "provider_config_key": "aws", + "expressions": { + "bucket": { + "references": [ + "aws_s3_bucket.planbucket.id", + "aws_s3_bucket.planbucket" + ] + }, + "rule": [ + { + "apply_server_side_encryption_by_default": [ + { + "sse_algorithm": { + "constant_value": "AES256" + } + } + ], + "bucket_key_enabled": { + "constant_value": true + } + } + ] + }, + "schema_version": 0 + }, + { + "address": "aws_security_group.sg", + "mode": "managed", + "type": "aws_security_group", + "name": "sg", + "provider_config_key": "aws", + "expressions": { + "name": { + "constant_value": "sg" + }, + "tags": { + "constant_value": { + "Name": "blah" + } + } + }, + "schema_version": 1 + }, + { + "address": "data.aws_s3_bucket.logging_bucket", + "mode": "data", + "type": "aws_s3_bucket", + "name": "logging_bucket", + "provider_config_key": "aws", + "expressions": { + "bucket": { + "constant_value": "iac-tfsec-dev" + } + }, + "schema_version": 0 + } + ], + "variables": { + "bucket_name": { + "default": "${template-name-is-$evil}" + } + } + } + } +} \ No newline at end of file diff --git a/pkg/iac/terraform/ignore.go b/pkg/iac/terraform/ignore.go deleted file mode 100644 index e52fbf202be5..000000000000 --- a/pkg/iac/terraform/ignore.go +++ /dev/null @@ -1,100 +0,0 @@ -package terraform - -import ( - "fmt" - "time" - - "github.com/zclconf/go-cty/cty" - - iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" -) - -type Ignore struct { - Range iacTypes.Range - RuleID string - Expiry *time.Time - Workspace string - Block bool - Params map[string]string -} - -type Ignores []Ignore - -func (ignores Ignores) Covering(modules Modules, m iacTypes.Metadata, workspace string, ids ...string) *Ignore { - for _, ignore := range ignores { - if ignore.Covering(modules, m, workspace, ids...) 
{ - return &ignore - } - } - return nil -} - -func (ignore Ignore) Covering(modules Modules, m iacTypes.Metadata, workspace string, ids ...string) bool { - if ignore.Expiry != nil && time.Now().After(*ignore.Expiry) { - return false - } - if ignore.Workspace != "" && ignore.Workspace != workspace { - return false - } - idMatch := ignore.RuleID == "*" || len(ids) == 0 - for _, id := range ids { - if id == ignore.RuleID { - idMatch = true - break - } - } - if !idMatch { - return false - } - - metaHierarchy := &m - for metaHierarchy != nil { - if ignore.Range.GetFilename() != metaHierarchy.Range().GetFilename() { - metaHierarchy = metaHierarchy.Parent() - continue - } - if metaHierarchy.Range().GetStartLine() == ignore.Range.GetStartLine()+1 || metaHierarchy.Range().GetStartLine() == ignore.Range.GetStartLine() { - return ignore.MatchParams(modules, metaHierarchy) - } - metaHierarchy = metaHierarchy.Parent() - } - return false - -} - -func (ignore Ignore) MatchParams(modules Modules, blockMetadata *iacTypes.Metadata) bool { - if len(ignore.Params) == 0 { - return true - } - block := modules.GetBlockByIgnoreRange(blockMetadata) - if block == nil { - return true - } - for key, val := range ignore.Params { - attr := block.GetAttribute(key) - if attr.IsNil() || !attr.Value().IsKnown() { - return false - } - switch attr.Type() { - case cty.String: - if !attr.Equals(val) { - return false - } - case cty.Number: - bf := attr.Value().AsBigFloat() - f64, _ := bf.Float64() - comparableInt := fmt.Sprintf("%d", int(f64)) - comparableFloat := fmt.Sprintf("%f", f64) - if val != comparableInt && val != comparableFloat { - return false - } - case cty.Bool: - if fmt.Sprintf("%t", attr.IsTrue()) != val { - return false - } - default: - return false - } - } - return true -} diff --git a/pkg/iac/terraform/module.go b/pkg/iac/terraform/module.go index dd89fa2bd40d..fec6ad7c8d0e 100644 --- a/pkg/iac/terraform/module.go +++ b/pkg/iac/terraform/module.go @@ -3,6 +3,8 @@ package terraform import ( "fmt" "strings" + + "github.com/aquasecurity/trivy/pkg/iac/ignore" ) type Module struct { @@ -10,11 +12,11 @@ type Module struct { blockMap map[string]Blocks rootPath string modulePath string - ignores Ignores + ignores ignore.Rules parent *Module } -func NewModule(rootPath, modulePath string, blocks Blocks, ignores Ignores) *Module { +func NewModule(rootPath, modulePath string, blocks Blocks, ignores ignore.Rules) *Module { blockMap := make(map[string]Blocks) @@ -41,7 +43,7 @@ func (c *Module) RootPath() string { return c.rootPath } -func (c *Module) Ignores() Ignores { +func (c *Module) Ignores() ignore.Rules { return c.ignores } diff --git a/pkg/iac/terraform/resource_block.go b/pkg/iac/terraform/resource_block.go index cc50c8d9b872..3339675ee304 100644 --- a/pkg/iac/terraform/resource_block.go +++ b/pkg/iac/terraform/resource_block.go @@ -3,6 +3,7 @@ package terraform import ( "bytes" "fmt" + "regexp" "strings" "text/template" ) @@ -91,13 +92,7 @@ func renderPrimitive(val interface{}) string { case PlanReference: return fmt.Sprintf("%v", t.Value) case string: - if strings.Contains(t, "\n") { - return fmt.Sprintf(`< 0 { - pkg.Digest = c.Files[0].Hash + + for _, f := range c.Files { + if f.Path != "" && pkg.FilePath == "" { + pkg.FilePath = f.Path + } + // An empty path represents a package digest + if f.Path == "" && len(f.Digests) > 0 { + pkg.Digest = f.Digests[0] + } } if p.Class() == types.ClassOSPkg { - m.fillSrcPkg(pkg) + m.fillSrcPkg(c, pkg) } return pkg, nil @@ -241,7 +257,12 @@ func (m *Decoder) pkgName(pkg 
*ftypes.Package, c *core.Component) string { return c.Name } -func (m *Decoder) fillSrcPkg(pkg *ftypes.Package) { +func (m *Decoder) fillSrcPkg(c *core.Component, pkg *ftypes.Package) { + if c.SrcName != "" && pkg.SrcName == "" { + pkg.SrcName = c.SrcName + } + m.parseSrcVersion(pkg, c.SrcVersion) + // Fill source package information for components in third-party SBOMs . if pkg.SrcName == "" { pkg.SrcName = pkg.Name @@ -257,6 +278,29 @@ func (m *Decoder) fillSrcPkg(pkg *ftypes.Package) { } } +// parseSrcVersion parses the version of the source package. +func (m *Decoder) parseSrcVersion(pkg *ftypes.Package, ver string) { + if ver == "" { + return + } + switch pkg.Identifier.PURL.Type { + case packageurl.TypeRPM: + v := rpmver.NewVersion(ver) + pkg.SrcEpoch = v.Epoch() + pkg.SrcVersion = v.Version() + pkg.SrcRelease = v.Release() + case packageurl.TypeDebian: + v, err := debver.NewVersion(ver) + if err != nil { + log.Logger.Debugw("Failed to parse Debian version", zap.Error(err)) + return + } + pkg.SrcEpoch = v.Epoch() + pkg.SrcVersion = v.Version() + pkg.SrcRelease = v.Revision() + } +} + // addOSPkgs traverses relationships and adds OS packages func (m *Decoder) addOSPkgs(sbom *types.SBOM) { var pkgs []ftypes.Package diff --git a/pkg/sbom/io/encode.go b/pkg/sbom/io/encode.go index 73c0d4fef3dc..5bb181992975 100644 --- a/pkg/sbom/io/encode.go +++ b/pkg/sbom/io/encode.go @@ -2,48 +2,52 @@ package io import ( "fmt" + "slices" "strconv" "github.com/package-url/packageurl-go" "github.com/samber/lo" "golang.org/x/xerrors" + "github.com/aquasecurity/trivy/pkg/digest" ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/purl" "github.com/aquasecurity/trivy/pkg/sbom/core" + "github.com/aquasecurity/trivy/pkg/scanner/utils" "github.com/aquasecurity/trivy/pkg/types" ) type Encoder struct { - bom *core.BOM + bom *core.BOM + opts core.Options } -func NewEncoder() *Encoder { - return &Encoder{} +func NewEncoder(opts core.Options) *Encoder { + return &Encoder{opts: opts} } -func (m *Encoder) Encode(report types.Report) (*core.BOM, error) { +func (e *Encoder) Encode(report types.Report) (*core.BOM, error) { // Metadata component - root, err := m.rootComponent(report) + root, err := e.rootComponent(report) if err != nil { return nil, xerrors.Errorf("failed to create root component: %w", err) } - m.bom = core.NewBOM() - m.bom.AddComponent(root) + e.bom = core.NewBOM(e.opts) + e.bom.AddComponent(root) for _, result := range report.Results { - m.encodeResult(root, report.Metadata, result) + e.encodeResult(root, report.Metadata, result) } // Components that do not have their own dependencies MUST be declared as empty elements within the graph. 
- if _, ok := m.bom.Relationships()[root.ID()]; !ok { - m.bom.AddRelationship(root, nil, "") + if _, ok := e.bom.Relationships()[root.ID()]; !ok { + e.bom.AddRelationship(root, nil, "") } - return m.bom, nil + return e.bom, nil } -func (m *Encoder) rootComponent(r types.Report) (*core.Component, error) { +func (e *Encoder) rootComponent(r types.Report) (*core.Component, error) { root := &core.Component{ Root: true, Name: r.ArtifactName, @@ -58,7 +62,7 @@ func (m *Encoder) rootComponent(r types.Report) (*core.Component, error) { switch r.ArtifactType { case ftypes.ArtifactContainerImage: - root.Type = core.TypeContainer + root.Type = core.TypeContainerImage props = append(props, core.Property{ Name: core.PropertyImageID, Value: r.Metadata.ImageID, @@ -73,9 +77,11 @@ func (m *Encoder) rootComponent(r types.Report) (*core.Component, error) { } case ftypes.ArtifactVM: - root.Type = core.TypeContainer - case ftypes.ArtifactFilesystem, ftypes.ArtifactRepository: - root.Type = core.TypeApplication + root.Type = core.TypeVM + case ftypes.ArtifactFilesystem: + root.Type = core.TypeFilesystem + case ftypes.ArtifactRepository: + root.Type = core.TypeRepository case ftypes.ArtifactCycloneDX: return r.BOM.Root(), nil } @@ -113,9 +119,8 @@ func (m *Encoder) rootComponent(r types.Report) (*core.Component, error) { return root, nil } -func (m *Encoder) encodeResult(root *core.Component, metadata types.Metadata, result types.Result) { - if result.Type == ftypes.NodePkg || result.Type == ftypes.PythonPkg || - result.Type == ftypes.GemSpec || result.Type == ftypes.Jar || result.Type == ftypes.CondaPkg { +func (e *Encoder) encodeResult(root *core.Component, metadata types.Metadata, result types.Result) { + if slices.Contains(ftypes.AggregatingTypes, result.Type) { // If a package is language-specific package that isn't associated with a lock file, // it will be a dependency of a component under "metadata". // e.g. @@ -126,7 +131,7 @@ func (m *Encoder) encodeResult(root *core.Component, metadata types.Metadata, re // ref. https://cyclonedx.org/use-cases/#inventory // Dependency graph from #1 to #2 - m.encodePackages(root, result) + e.encodePackages(root, result) } else if result.Class == types.ClassOSPkg || result.Class == types.ClassLangPkg { // If a package is OS package, it will be a dependency of "Operating System" component. // e.g. @@ -146,21 +151,21 @@ func (m *Encoder) encodeResult(root *core.Component, metadata types.Metadata, re // -> etc. 
// #2 - appComponent := m.resultComponent(root, result, metadata.OS) + appComponent := e.resultComponent(root, result, metadata.OS) // #3 - m.encodePackages(appComponent, result) + e.encodePackages(appComponent, result) } } -func (m *Encoder) encodePackages(parent *core.Component, result types.Result) { +func (e *Encoder) encodePackages(parent *core.Component, result types.Result) { // Get dependency parents first parents := ftypes.Packages(result.Packages).ParentDeps() // Group vulnerabilities by package ID vulns := make(map[string][]core.Vulnerability) for _, vuln := range result.Vulnerabilities { - v := m.vulnerability(vuln) + v := e.vulnerability(vuln) vulns[v.PkgID] = append(vulns[v.PkgID], v) } @@ -171,15 +176,15 @@ func (m *Encoder) encodePackages(parent *core.Component, result types.Result) { result.Packages[i].ID = pkgID // Convert packages to components - c := m.component(result.Type, pkg) - components[pkgID] = c + c := e.component(result, pkg) + components[pkgID+pkg.FilePath] = c // Add a component - m.bom.AddComponent(c) + e.bom.AddComponent(c) // Add vulnerabilities if vv := vulns[pkgID]; vv != nil { - m.bom.AddVulnerabilities(c, vv) + e.bom.AddVulnerabilities(c, vv) } } @@ -190,26 +195,26 @@ func (m *Encoder) encodePackages(parent *core.Component, result types.Result) { continue } - directPkg := components[pkg.ID] - m.bom.AddRelationship(parent, directPkg, core.RelationshipContains) + directPkg := components[pkg.ID+pkg.FilePath] + e.bom.AddRelationship(parent, directPkg, core.RelationshipContains) for _, dep := range pkg.DependsOn { indirectPkg, ok := components[dep] if !ok { continue } - m.bom.AddRelationship(directPkg, indirectPkg, core.RelationshipDependsOn) + e.bom.AddRelationship(directPkg, indirectPkg, core.RelationshipDependsOn) } // Components that do not have their own dependencies MUST be declared as empty elements within the graph. // TODO: Should check if the component has actually no dependencies or the dependency graph is not supported. if len(pkg.DependsOn) == 0 { - m.bom.AddRelationship(directPkg, nil, "") + e.bom.AddRelationship(directPkg, nil, "") } } } -func (m *Encoder) resultComponent(root *core.Component, r types.Result, osFound *ftypes.OS) *core.Component { +func (e *Encoder) resultComponent(root *core.Component, r types.Result, osFound *ftypes.OS) *core.Component { component := &core.Component{ Name: r.Target, Properties: []core.Property{ @@ -235,18 +240,24 @@ func (m *Encoder) resultComponent(root *core.Component, r types.Result, osFound component.Type = core.TypeApplication } - m.bom.AddRelationship(root, component, core.RelationshipContains) + e.bom.AddRelationship(root, component, core.RelationshipContains) return component } -func (*Encoder) component(pkgType ftypes.TargetType, pkg ftypes.Package) *core.Component { +func (*Encoder) component(result types.Result, pkg ftypes.Package) *core.Component { name := pkg.Name - version := pkg.Version + version := utils.FormatVersion(pkg) var group string // there are cases when we can't build purl // e.g. 
local Go packages if pu := pkg.Identifier.PURL; pu != nil { version = pu.Version + for _, q := range pu.Qualifiers { + if q.Key == "epoch" && q.Value != "0" { + version = fmt.Sprintf("%s:%s", q.Value, version) + } + } + // Use `group` field for GroupID and `name` for ArtifactID for java files // https://github.com/aquasecurity/trivy/issues/4675 // Use `group` field for npm scopes @@ -264,7 +275,7 @@ func (*Encoder) component(pkgType ftypes.TargetType, pkg ftypes.Package) *core.C }, { Name: core.PropertyPkgType, - Value: string(pkgType), + Value: string(result.Type), }, { Name: core.PropertyFilePath, @@ -303,16 +314,25 @@ func (*Encoder) component(pkgType ftypes.TargetType, pkg ftypes.Package) *core.C var files []core.File if pkg.FilePath != "" || pkg.Digest != "" { files = append(files, core.File{ - Path: pkg.FilePath, - Hash: pkg.Digest, + Path: pkg.FilePath, + Digests: lo.Ternary(pkg.Digest != "", []digest.Digest{pkg.Digest}, nil), }) } + // TODO(refactor): simplify the list of conditions + var srcFile string + if result.Class == types.ClassLangPkg && !slices.Contains(ftypes.AggregatingTypes, result.Type) { + srcFile = result.Target + } + return &core.Component{ - Type: core.TypeLibrary, - Name: name, - Group: group, - Version: version, + Type: core.TypeLibrary, + Name: name, + Group: group, + Version: version, + SrcName: pkg.SrcName, + SrcVersion: utils.FormatSrcVersion(pkg), + SrcFile: srcFile, PkgID: core.PkgID{ PURL: pkg.Identifier.PURL, }, diff --git a/pkg/sbom/io/encode_test.go b/pkg/sbom/io/encode_test.go index 5c2af5b54d9f..a57bddd9983d 100644 --- a/pkg/sbom/io/encode_test.go +++ b/pkg/sbom/io/encode_test.go @@ -113,7 +113,7 @@ func TestEncoder_Encode(t *testing.T) { }, wantComponents: map[uuid.UUID]*core.Component{ uuid.MustParse("3ff14136-e09f-4df9-80ea-000000000001"): { - Type: core.TypeContainer, + Type: core.TypeContainerImage, Name: "debian:12", Root: true, PkgID: core.PkgID{ @@ -320,7 +320,8 @@ func TestEncoder_Encode(t *testing.T) { t.Run(tt.name, func(t *testing.T) { uuid.SetFakeUUID(t, "3ff14136-e09f-4df9-80ea-%012d") - got, err := sbomio.NewEncoder().Encode(tt.report) + opts := core.Options{GenerateBOMRef: true} + got, err := sbomio.NewEncoder(opts).Encode(tt.report) if tt.wantErr != "" { require.ErrorContains(t, err, tt.wantErr) return diff --git a/pkg/sbom/sbom.go b/pkg/sbom/sbom.go index 2d8d74b267a0..5b1055ed7174 100644 --- a/pkg/sbom/sbom.go +++ b/pkg/sbom/sbom.go @@ -183,8 +183,7 @@ func decodeAttestCycloneDXJSONFormat(r io.ReadSeeker) (Format, bool) { func Decode(f io.Reader, format Format) (types.SBOM, error) { var ( v interface{} - bom = core.NewBOM() - sbom types.SBOM + bom = core.NewBOM(core.Options{}) decoder interface{ Decode(any) error } ) @@ -212,10 +211,10 @@ func Decode(f io.Reader, format Format) (types.SBOM, error) { } decoder = json.NewDecoder(f) case FormatSPDXJSON: - v = &spdx.SPDX{SBOM: &sbom} + v = &spdx.SPDX{BOM: bom} decoder = json.NewDecoder(f) case FormatSPDXTV: - v = &spdx.SPDX{SBOM: &sbom} + v = &spdx.SPDX{BOM: bom} decoder = spdx.NewTVDecoder(f) default: return types.SBOM{}, xerrors.Errorf("%s scanning is not yet supported", format) @@ -227,11 +226,7 @@ func Decode(f io.Reader, format Format) (types.SBOM, error) { return types.SBOM{}, xerrors.Errorf("failed to decode: %w", err) } - // TODO: use BOM in SPDX - if format == FormatSPDXJSON || format == FormatSPDXTV { - return sbom, nil - } - + var sbom types.SBOM if err := sbomio.NewDecoder(bom).Decode(&sbom); err != nil { return types.SBOM{}, xerrors.Errorf("failed to decode: %w", err) } 
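A short usage sketch may help tie the sbom.go change above together; it is illustrative only and not taken from this patch. The helper name decodeSPDXFile and the input path are assumptions, and the imports (os, pkg/types, and pkg/sbom imported as sbom) are implied rather than shown. The point it illustrates: Decode now routes SPDX input through the intermediate core.BOM and sbomio.NewDecoder, bringing it in line with the existing CycloneDX path, so callers receive a types.SBOM without any format-specific early return.

    // Hypothetical helper, not part of this patch; assumes
    // "github.com/aquasecurity/trivy/pkg/sbom" is imported as sbom and
    // "github.com/aquasecurity/trivy/pkg/types" as types.
    func decodeSPDXFile(path string) (types.SBOM, error) {
        f, err := os.Open(path)
        if err != nil {
            return types.SBOM{}, err
        }
        defer f.Close()
        // Decode builds a core.BOM internally and converts it with
        // sbomio.NewDecoder(bom).Decode(&sbom) for every supported format,
        // so there is no SPDX-specific branch left in the caller.
        return sbom.Decode(f, sbom.FormatSPDXJSON)
    }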
diff --git a/pkg/sbom/spdx/marshal.go b/pkg/sbom/spdx/marshal.go index ceb9a1ae24ce..6c1490fe1aec 100644 --- a/pkg/sbom/spdx/marshal.go +++ b/pkg/sbom/spdx/marshal.go @@ -4,26 +4,25 @@ import ( "context" "fmt" "sort" - "strconv" "strings" "time" "github.com/mitchellh/hashstructure/v2" + "github.com/package-url/packageurl-go" "github.com/samber/lo" "github.com/spdx/tools-golang/spdx" "github.com/spdx/tools-golang/spdx/v2/common" spdxutils "github.com/spdx/tools-golang/utils" - "golang.org/x/exp/maps" + "golang.org/x/exp/slices" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/clock" "github.com/aquasecurity/trivy/pkg/digest" - ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/licensing" "github.com/aquasecurity/trivy/pkg/licensing/expression" "github.com/aquasecurity/trivy/pkg/log" - "github.com/aquasecurity/trivy/pkg/purl" - "github.com/aquasecurity/trivy/pkg/scanner/utils" + "github.com/aquasecurity/trivy/pkg/sbom/core" + sbomio "github.com/aquasecurity/trivy/pkg/sbom/io" "github.com/aquasecurity/trivy/pkg/types" "github.com/aquasecurity/trivy/pkg/uuid" ) @@ -40,19 +39,6 @@ const ( CategoryPackageManager = "PACKAGE-MANAGER" RefTypePurl = "purl" - PropertySchemaVersion = "SchemaVersion" - - // Image properties - PropertySize = "Size" - PropertyImageID = "ImageID" - PropertyRepoDigest = "RepoDigest" - PropertyDiffID = "DiffID" - PropertyRepoTag = "RepoTag" - - // Package properties - PropertyPkgID = "PkgID" - PropertyLayerDiffID = "LayerDiffID" - PropertyLayerDigest = "LayerDigest" // Package Purpose fields PackagePurposeOS = "OPERATING-SYSTEM" PackagePurposeContainer = "CONTAINER" @@ -75,8 +61,20 @@ const ( var ( SourcePackagePrefix = "built package from" + SourceFilePrefix = "package found in" ) +// duplicateProperties contains a list of properties contained in other fields. +var duplicateProperties = []string{ + // `SourceInfo` contains SrcName and SrcVersion (it contains PropertySrcRelease and PropertySrcEpoch) + core.PropertySrcName, + core.PropertySrcRelease, + core.PropertySrcEpoch, + core.PropertySrcVersion, + // `File` contains filePath. + core.PropertyFilePath, +} + type Marshaler struct { format spdx.Document hasher Hash @@ -107,75 +105,95 @@ func NewMarshaler(version string, opts ...marshalOption) *Marshaler { return m } -func (m *Marshaler) Marshal(ctx context.Context, r types.Report) (*spdx.Document, error) { - var relationShips []*spdx.Relationship - packages := make(map[spdx.ElementID]*spdx.Package) - pkgDownloadLocation := getPackageDownloadLocation(r.ArtifactType, r.ArtifactName) +func (m *Marshaler) MarshalReport(ctx context.Context, report types.Report) (*spdx.Document, error) { + // Convert into an intermediate representation + bom, err := sbomio.NewEncoder(core.Options{}).Encode(report) + if err != nil { + return nil, xerrors.Errorf("failed to marshal report: %w", err) + } + + return m.Marshal(ctx, bom) +} + +func (m *Marshaler) Marshal(ctx context.Context, bom *core.BOM) (*spdx.Document, error) { + var ( + relationShips []*spdx.Relationship + packages []*spdx.Package + ) + + root := bom.Root() + pkgDownloadLocation := m.packageDownloadLocation(root) + + // Component ID => SPDX ID + packageIDs := make(map[uuid.UUID]spdx.ElementID) // Root package contains OS, OS packages, language-specific packages and so on. 
- rootPkg, err := m.rootPackage(r, pkgDownloadLocation) + rootPkg, err := m.rootSPDXPackage(root, pkgDownloadLocation) if err != nil { return nil, xerrors.Errorf("failed to generate a root package: %w", err) } - packages[rootPkg.PackageSPDXIdentifier] = rootPkg + packages = append(packages, rootPkg) relationShips = append(relationShips, - relationShip(DocumentSPDXIdentifier, rootPkg.PackageSPDXIdentifier, RelationShipDescribe), + m.spdxRelationShip(DocumentSPDXIdentifier, rootPkg.PackageSPDXIdentifier, RelationShipDescribe), ) + packageIDs[root.ID()] = rootPkg.PackageSPDXIdentifier - var spdxFiles []*spdx.File - - for _, result := range r.Results { - if len(result.Packages) == 0 { + var files []*spdx.File + for _, c := range bom.Components() { + if c.Root { continue } - parentPackage, err := m.resultToSpdxPackage(result, r.Metadata.OS, pkgDownloadLocation) + spdxPackage, err := m.spdxPackage(c, pkgDownloadLocation) if err != nil { - return nil, xerrors.Errorf("failed to parse result: %w", err) + return nil, xerrors.Errorf("spdx package error: %w", err) } - packages[parentPackage.PackageSPDXIdentifier] = &parentPackage - relationShips = append(relationShips, - relationShip(rootPkg.PackageSPDXIdentifier, parentPackage.PackageSPDXIdentifier, RelationShipContains), - ) - - for _, pkg := range result.Packages { - spdxPackage, err := m.pkgToSpdxPackage(result.Type, pkgDownloadLocation, result.Class, r.Metadata, pkg) - if err != nil { - return nil, xerrors.Errorf("failed to parse package: %w", err) - } - packages[spdxPackage.PackageSPDXIdentifier] = &spdxPackage + packages = append(packages, &spdxPackage) + packageIDs[c.ID()] = spdxPackage.PackageSPDXIdentifier + + spdxFiles, err := m.spdxFiles(c) + if err != nil { + return nil, xerrors.Errorf("spdx files error: %w", err) + } else if len(spdxFiles) == 0 { + continue + } + + files = append(files, spdxFiles...) + for _, file := range spdxFiles { relationShips = append(relationShips, - relationShip(parentPackage.PackageSPDXIdentifier, spdxPackage.PackageSPDXIdentifier, RelationShipContains), + m.spdxRelationShip(spdxPackage.PackageSPDXIdentifier, file.FileSPDXIdentifier, RelationShipContains), ) - files, err := m.pkgFiles(pkg) - if err != nil { - return nil, xerrors.Errorf("package file error: %w", err) - } else if files == nil { - continue - } + } + verificationCode, err := spdxutils.GetVerificationCode(spdxFiles, "") + if err != nil { + return nil, xerrors.Errorf("package verification error: %w", err) + } + spdxPackage.FilesAnalyzed = true + spdxPackage.PackageVerificationCode = &verificationCode + } - spdxFiles = append(spdxFiles, files...) 
- for _, file := range files { - relationShips = append(relationShips, - relationShip(spdxPackage.PackageSPDXIdentifier, file.FileSPDXIdentifier, RelationShipContains), - ) + for id, rels := range bom.Relationships() { + for _, rel := range rels { + refA, ok := packageIDs[id] + if !ok { + continue } - - verificationCode, err := spdxutils.GetVerificationCode(files, "") - if err != nil { - return nil, xerrors.Errorf("package verification error: %w", err) + refB, ok := packageIDs[rel.Dependency] + if !ok { + continue } - - spdxPackage.FilesAnalyzed = true - spdxPackage.PackageVerificationCode = &verificationCode + relationShips = append(relationShips, m.spdxRelationShip(refA, refB, m.spdxRelationshipType(rel.Type))) } } + sortPackages(packages) + sortRelationships(relationShips) + sortFiles(files) return &spdx.Document{ SPDXVersion: spdx.Version, DataLicense: spdx.DataLicense, SPDXIdentifier: DocumentSPDXIdentifier, - DocumentName: r.ArtifactName, - DocumentNamespace: getDocumentNamespace(r, m), + DocumentName: root.Name, + DocumentNamespace: getDocumentNamespace(root), CreationInfo: &spdx.CreationInfo{ Creators: []common.Creator{ { @@ -189,214 +207,215 @@ func (m *Marshaler) Marshal(ctx context.Context, r types.Report) (*spdx.Document }, Created: clock.Now(ctx).UTC().Format(time.RFC3339), }, - Packages: toPackages(packages), + Packages: packages, Relationships: relationShips, - Files: spdxFiles, + Files: files, }, nil } -func toPackages(packages map[spdx.ElementID]*spdx.Package) []*spdx.Package { - ret := maps.Values(packages) - sort.Slice(ret, func(i, j int) bool { - if ret[i].PackageName != ret[j].PackageName { - return ret[i].PackageName < ret[j].PackageName - } - return ret[i].PackageSPDXIdentifier < ret[j].PackageSPDXIdentifier - }) - return ret -} - -func (m *Marshaler) resultToSpdxPackage(result types.Result, os *ftypes.OS, pkgDownloadLocation string) (spdx.Package, error) { - switch result.Class { - case types.ClassOSPkg: - osPkg, err := m.osPackage(os, pkgDownloadLocation) - if err != nil { - return spdx.Package{}, xerrors.Errorf("failed to parse operating system package: %w", err) - } - return osPkg, nil - case types.ClassLangPkg: - langPkg, err := m.langPackage(result.Target, pkgDownloadLocation, result.Type) - if err != nil { - return spdx.Package{}, xerrors.Errorf("failed to parse application package: %w", err) - } - return langPkg, nil - default: - // unsupported packages - return spdx.Package{}, nil - } -} - -func (m *Marshaler) parseFile(filePath string, d digest.Digest) (spdx.File, error) { - pkgID, err := calcPkgID(m.hasher, filePath) - if err != nil { - return spdx.File{}, xerrors.Errorf("failed to get %s package ID: %w", filePath, err) - } - file := spdx.File{ - FileSPDXIdentifier: spdx.ElementID(fmt.Sprintf("File-%s", pkgID)), - FileName: filePath, - Checksums: digestToSpdxFileChecksum(d), +func (m *Marshaler) packageDownloadLocation(root *core.Component) string { + location := noneField + // this field is used for git/mercurial/subversion/bazaar: + // https://spdx.github.io/spdx-spec/v2.2.2/package-information/#77-package-download-location-field + if root.Type == core.TypeRepository { + // Trivy currently only supports git repositories. 
Format examples: + // git+https://git.myproject.org/MyProject.git + // git+http://git.myproject.org/MyProject + location = fmt.Sprintf("git+%s", root.Name) } - return file, nil + return location } -func (m *Marshaler) rootPackage(r types.Report, pkgDownloadLocation string) (*spdx.Package, error) { +func (m *Marshaler) rootSPDXPackage(root *core.Component, pkgDownloadLocation string) (*spdx.Package, error) { var externalReferences []*spdx.PackageExternalReference - attributionTexts := []string{attributionText(PropertySchemaVersion, strconv.Itoa(r.SchemaVersion))} - // When the target is a container image, add PURL to the external references of the root package. - if p, err := purl.New(purl.TypeOCI, r.Metadata, ftypes.Package{}); err != nil { - return nil, xerrors.Errorf("failed to new package url for oci: %w", err) - } else if p != nil { - externalReferences = append(externalReferences, purlExternalReference(p.String())) - } - - if r.Metadata.ImageID != "" { - attributionTexts = appendAttributionText(attributionTexts, PropertyImageID, r.Metadata.ImageID) - } - if r.Metadata.Size != 0 { - attributionTexts = appendAttributionText(attributionTexts, PropertySize, strconv.FormatInt(r.Metadata.Size, 10)) + if root.PkgID.PURL != nil { + externalReferences = append(externalReferences, m.purlExternalReference(root.PkgID.PURL.String())) } - for _, d := range r.Metadata.RepoDigests { - attributionTexts = appendAttributionText(attributionTexts, PropertyRepoDigest, d) - } - for _, d := range r.Metadata.DiffIDs { - attributionTexts = appendAttributionText(attributionTexts, PropertyDiffID, d) - } - for _, t := range r.Metadata.RepoTags { - attributionTexts = appendAttributionText(attributionTexts, PropertyRepoTag, t) - } - - pkgID, err := calcPkgID(m.hasher, fmt.Sprintf("%s-%s", r.ArtifactName, r.ArtifactType)) + pkgID, err := calcPkgID(m.hasher, fmt.Sprintf("%s-%s", root.Name, root.Type)) if err != nil { return nil, xerrors.Errorf("failed to get %s package ID: %w", pkgID, err) } pkgPurpose := PackagePurposeSource - if r.ArtifactType == ftypes.ArtifactContainerImage { + if root.Type == core.TypeContainerImage { pkgPurpose = PackagePurposeContainer } return &spdx.Package{ - PackageName: r.ArtifactName, - PackageSPDXIdentifier: elementID(camelCase(string(r.ArtifactType)), pkgID), + PackageName: root.Name, + PackageSPDXIdentifier: elementID(camelCase(string(root.Type)), pkgID), PackageDownloadLocation: pkgDownloadLocation, - PackageAttributionTexts: attributionTexts, + PackageAttributionTexts: m.spdxAttributionTexts(root), PackageExternalReferences: externalReferences, PrimaryPackagePurpose: pkgPurpose, }, nil } -func (m *Marshaler) osPackage(osFound *ftypes.OS, pkgDownloadLocation string) (spdx.Package, error) { - if osFound == nil { - return spdx.Package{}, nil - } - - pkgID, err := calcPkgID(m.hasher, osFound) - if err != nil { - return spdx.Package{}, xerrors.Errorf("failed to get os metadata package ID: %w", err) +func (m *Marshaler) appendAttributionText(attributionTexts []string, key, value string) []string { + if value == "" { + return attributionTexts } - - return spdx.Package{ - PackageName: string(osFound.Family), - PackageVersion: osFound.Name, - PackageSPDXIdentifier: elementID(ElementOperatingSystem, pkgID), - PackageDownloadLocation: pkgDownloadLocation, - PrimaryPackagePurpose: PackagePurposeOS, - }, nil + return append(attributionTexts, fmt.Sprintf("%s: %s", key, value)) } -func (m *Marshaler) langPackage(target, pkgDownloadLocation string, appType ftypes.LangType) (spdx.Package, error) { - 
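// A small standalone sketch of packageDownloadLocation above: only repository
// roots receive a VCS-prefixed location such as git+https://..., every other
// artifact type falls back to NONE. The string constants here stand in for the
// real core/SPDX constants.
package main

import "fmt"

const (
	typeRepository = "repository"
	noneField      = "NONE"
)

// downloadLocation returns a VCS-prefixed URL for repository roots, NONE otherwise.
func downloadLocation(rootType, rootName string) string {
	if rootType == typeRepository {
		// e.g. git+https://git.myproject.org/MyProject.git
		return fmt.Sprintf("git+%s", rootName)
	}
	return noneField
}

func main() {
	fmt.Println(downloadLocation(typeRepository, "http://test-aggregate")) // git+http://test-aggregate
	fmt.Println(downloadLocation("container_image", "rails:latest"))      // NONE
}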
pkgID, err := calcPkgID(m.hasher, fmt.Sprintf("%s-%s", target, appType)) - if err != nil { - return spdx.Package{}, xerrors.Errorf("failed to get %s package ID: %w", target, err) +func (m *Marshaler) purlExternalReference(packageURL string) *spdx.PackageExternalReference { + return &spdx.PackageExternalReference{ + Category: CategoryPackageManager, + RefType: RefTypePurl, + Locator: packageURL, } - - return spdx.Package{ - PackageName: string(appType), - PackageSourceInfo: target, // TODO: Files seems better - PackageSPDXIdentifier: elementID(ElementApplication, pkgID), - PackageDownloadLocation: pkgDownloadLocation, - PrimaryPackagePurpose: PackagePurposeApplication, - }, nil } -func (m *Marshaler) pkgToSpdxPackage(t ftypes.TargetType, pkgDownloadLocation string, class types.ResultClass, metadata types.Metadata, pkg ftypes.Package) (spdx.Package, error) { - license := GetLicense(pkg) - - pkgID, err := calcPkgID(m.hasher, pkg) +func (m *Marshaler) spdxPackage(c *core.Component, pkgDownloadLocation string) (spdx.Package, error) { + pkgID, err := calcPkgID(m.hasher, c) if err != nil { - return spdx.Package{}, xerrors.Errorf("failed to get %s package ID: %w", pkg.Name, err) + return spdx.Package{}, xerrors.Errorf("failed to get os metadata package ID: %w", err) } - var pkgSrcInfo string - if class == types.ClassOSPkg && pkg.SrcName != "" { - pkgSrcInfo = fmt.Sprintf("%s: %s %s", SourcePackagePrefix, pkg.SrcName, utils.FormatSrcVersion(pkg)) + var elementType, purpose, license, sourceInfo string + var supplier *spdx.Supplier + switch c.Type { + case core.TypeOS: + elementType = ElementOperatingSystem + purpose = PackagePurposeOS + case core.TypeApplication: + elementType = ElementApplication + purpose = PackagePurposeApplication + case core.TypeLibrary: + elementType = ElementPackage + purpose = PackagePurposeLibrary + license = m.spdxLicense(c) + + if c.SrcName != "" { + sourceInfo = fmt.Sprintf("%s: %s %s", SourcePackagePrefix, c.SrcName, c.SrcVersion) + } else if c.SrcFile != "" { + sourceInfo = fmt.Sprintf("%s: %s", SourceFilePrefix, c.SrcFile) + } + + supplier = &spdx.Supplier{Supplier: PackageSupplierNoAssertion} + if c.Supplier != "" { + supplier = &spdx.Supplier{ + SupplierType: PackageSupplierOrganization, // Always use "Organization" at the moment as it is difficult to distinguish between "Person" or "Organization". + Supplier: c.Supplier, + } + } } var pkgExtRefs []*spdx.PackageExternalReference - if pkg.Identifier.PURL != nil { - pkgExtRefs = []*spdx.PackageExternalReference{purlExternalReference(pkg.Identifier.PURL.String())} + if c.PkgID.PURL != nil { + pkgExtRefs = []*spdx.PackageExternalReference{m.purlExternalReference(c.PkgID.PURL.String())} } - var attrTexts []string - attrTexts = appendAttributionText(attrTexts, PropertyPkgID, pkg.ID) - attrTexts = appendAttributionText(attrTexts, PropertyLayerDigest, pkg.Layer.Digest) - attrTexts = appendAttributionText(attrTexts, PropertyLayerDiffID, pkg.Layer.DiffID) - - supplier := &spdx.Supplier{Supplier: PackageSupplierNoAssertion} - if pkg.Maintainer != "" { - supplier = &spdx.Supplier{ - SupplierType: PackageSupplierOrganization, // Always use "Organization" at the moment as it is difficult to distinguish between "Person" or "Organization". - Supplier: pkg.Maintainer, + var digests []digest.Digest + for _, f := range c.Files { + // The file digests are stored separately. 
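// A compact sketch of the component-type switch in spdxPackage above: OS
// components map to the OperatingSystem element prefix with the OPERATING-SYSTEM
// purpose, applications to Application/APPLICATION, and libraries to
// Package/LIBRARY. The plain strings are simplified stand-ins for the core and
// SPDX constants.
package main

import "fmt"

// componentKind returns the SPDX element-ID prefix and primary package purpose
// for a BOM component type.
func componentKind(componentType string) (elementType, purpose string) {
	switch componentType {
	case "os":
		return "OperatingSystem", "OPERATING-SYSTEM"
	case "application":
		return "Application", "APPLICATION"
	default: // libraries
		return "Package", "LIBRARY"
	}
}

func main() {
	for _, t := range []string{"os", "application", "library"} {
		e, p := componentKind(t)
		fmt.Printf("%s -> %s / %s\n", t, e, p)
	}
}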
+ if f.Path != "" { + continue } - } - - var checksum []spdx.Checksum - if pkg.Digest != "" && class == types.ClassOSPkg { - checksum = digestToSpdxFileChecksum(pkg.Digest) + digests = append(digests, f.Digests...) } return spdx.Package{ - PackageName: pkg.Name, - PackageVersion: utils.FormatVersion(pkg), - PackageSPDXIdentifier: elementID(ElementPackage, pkgID), - PackageDownloadLocation: pkgDownloadLocation, - PackageSourceInfo: pkgSrcInfo, + PackageSPDXIdentifier: elementID(elementType, pkgID), + PackageName: spdxPkgName(c), + PackageVersion: c.Version, + PrimaryPackagePurpose: purpose, + PackageDownloadLocation: pkgDownloadLocation, + PackageExternalReferences: pkgExtRefs, + PackageAttributionTexts: m.spdxAttributionTexts(c), + PackageSourceInfo: sourceInfo, + PackageSupplier: supplier, + PackageChecksums: m.spdxChecksums(digests), // The Declared License is what the authors of a project believe govern the package PackageLicenseConcluded: license, // The Concluded License field is the license the SPDX file creator believes governs the package PackageLicenseDeclared: license, - - PackageExternalReferences: pkgExtRefs, - PackageAttributionTexts: attrTexts, - PrimaryPackagePurpose: PackagePurposeLibrary, - PackageSupplier: supplier, - PackageChecksums: checksum, }, nil } -func (m *Marshaler) pkgFiles(pkg ftypes.Package) ([]*spdx.File, error) { - if pkg.FilePath == "" { - return nil, nil +func spdxPkgName(component *core.Component) string { + if p := component.PkgID.PURL; p != nil && component.Group != "" { + if p.Type == packageurl.TypeMaven || p.Type == packageurl.TypeGradle { + return component.Group + ":" + component.Name + } + return component.Group + "/" + component.Name } + return component.Name +} - file, err := m.parseFile(pkg.FilePath, pkg.Digest) - if err != nil { - return nil, xerrors.Errorf("failed to parse file: %w", err) +func (m *Marshaler) spdxAttributionTexts(c *core.Component) []string { + var texts []string + for _, p := range c.Properties { + // Add properties that are not in other fields. 
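// A standalone sketch of the spdxPkgName rule above: Maven/Gradle coordinates
// join group and name with ":", other ecosystems with a group (e.g. npm scopes)
// use "/", and components without a group keep their bare name. The purl type
// strings below are assumptions standing in for the packageurl constants.
package main

import "fmt"

// joinPkgName renders a package name from its purl type, group and name.
func joinPkgName(purlType, group, name string) string {
	if group == "" {
		return name
	}
	if purlType == "maven" || purlType == "gradle" {
		return group + ":" + name
	}
	return group + "/" + name
}

func main() {
	fmt.Println(joinPkgName("maven", "com.example", "example")) // com.example:example
	fmt.Println(joinPkgName("npm", "@babel", "core"))           // @babel/core
	fmt.Println(joinPkgName("gem", "", "actionpack"))           // actionpack
}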
+ if !slices.Contains(duplicateProperties, p.Name) { + texts = m.appendAttributionText(texts, p.Name, p.Value) + } } - return []*spdx.File{ - &file, - }, nil + return texts } -func elementID(elementType, pkgID string) spdx.ElementID { - return spdx.ElementID(fmt.Sprintf("%s-%s", elementType, pkgID)) +func (m *Marshaler) spdxLicense(c *core.Component) string { + if len(c.Licenses) == 0 { + return noneField + } + return NormalizeLicense(c.Licenses) +} + +func (m *Marshaler) spdxChecksums(digests []digest.Digest) []common.Checksum { + var checksums []common.Checksum + for _, d := range digests { + var alg spdx.ChecksumAlgorithm + switch d.Algorithm() { + case digest.SHA1: + alg = spdx.SHA1 + case digest.SHA256: + alg = spdx.SHA256 + case digest.MD5: + alg = spdx.MD5 + default: + return nil + } + checksums = append(checksums, spdx.Checksum{ + Algorithm: alg, + Value: d.Encoded(), + }) + } + + return checksums +} + +func (m *Marshaler) spdxFiles(c *core.Component) ([]*spdx.File, error) { + var files []*spdx.File + for _, file := range c.Files { + if file.Path == "" || len(file.Digests) == 0 { + continue + } + spdxFile, err := m.spdxFile(file.Path, file.Digests) + if err != nil { + return nil, xerrors.Errorf("failed to parse file: %w", err) + } + files = append(files, spdxFile) + } + return files, nil +} + +func (m *Marshaler) spdxFile(filePath string, digests []digest.Digest) (*spdx.File, error) { + pkgID, err := calcPkgID(m.hasher, filePath) + if err != nil { + return nil, xerrors.Errorf("failed to get %s package ID: %w", filePath, err) + } + return &spdx.File{ + FileSPDXIdentifier: spdx.ElementID(fmt.Sprintf("File-%s", pkgID)), + FileName: filePath, + Checksums: m.spdxChecksums(digests), + }, nil } -func relationShip(refA, refB spdx.ElementID, operator string) *spdx.Relationship { +func (m *Marshaler) spdxRelationShip(refA, refB spdx.ElementID, operator string) *spdx.Relationship { ref := spdx.Relationship{ RefA: common.MakeDocElementID("", string(refA)), RefB: common.MakeDocElementID("", string(refB)), @@ -405,51 +424,65 @@ func relationShip(refA, refB spdx.ElementID, operator string) *spdx.Relationship return &ref } -func appendAttributionText(attributionTexts []string, key, value string) []string { - if value == "" { - return attributionTexts +func (m *Marshaler) spdxRelationshipType(relType core.RelationshipType) string { + switch relType { + case core.RelationshipDependsOn: + return RelationShipDependsOn + case core.RelationshipContains: + return RelationShipContains + case core.RelationshipDescribes: + return RelationShipDescribe + default: + return RelationShipDependsOn } - return append(attributionTexts, attributionText(key, value)) } -func attributionText(key, value string) string { - return fmt.Sprintf("%s: %s", key, value) +func sortPackages(pkgs []*spdx.Package) { + sort.Slice(pkgs, func(i, j int) bool { + switch { + case pkgs[i].PrimaryPackagePurpose != pkgs[j].PrimaryPackagePurpose: + return pkgs[i].PrimaryPackagePurpose < pkgs[j].PrimaryPackagePurpose + case pkgs[i].PackageName != pkgs[j].PackageName: + return pkgs[i].PackageName < pkgs[j].PackageName + default: + return pkgs[i].PackageSPDXIdentifier < pkgs[j].PackageSPDXIdentifier + } + }) } -func purlExternalReference(packageURL string) *spdx.PackageExternalReference { - return &spdx.PackageExternalReference{ - Category: CategoryPackageManager, - RefType: RefTypePurl, - Locator: packageURL, - } +func sortRelationships(rels []*spdx.Relationship) { + sort.Slice(rels, func(i, j int) bool { + switch { + case 
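// A standalone sketch of the spdxChecksums mapping above: only SHA1, SHA256 and
// MD5 digests are converted, and any unsupported algorithm makes the whole list
// nil. Digests are assumed to be plain "alg:hex" strings here rather than the
// digest.Digest type used in the real code.
package main

import (
	"fmt"
	"strings"
)

type checksum struct {
	Algorithm string
	Value     string
}

// toSPDXChecksums converts "alg:hex" digests into SPDX checksum entries.
func toSPDXChecksums(digests []string) []checksum {
	var checksums []checksum
	for _, d := range digests {
		alg, value, ok := strings.Cut(d, ":")
		if !ok {
			return nil
		}
		switch alg {
		case "sha1":
			alg = "SHA1"
		case "sha256":
			alg = "SHA256"
		case "md5":
			alg = "MD5"
		default:
			return nil
		}
		checksums = append(checksums, checksum{Algorithm: alg, Value: value})
	}
	return checksums
}

func main() {
	fmt.Println(toSPDXChecksums([]string{"sha1:d2f9f9aed5161f6e4116a3f9573f41cd832f137c"}))
	fmt.Println(toSPDXChecksums([]string{"sha512:deadbeef"})) // unsupported algorithm -> nil
}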
rels[i].RefA.ElementRefID != rels[j].RefA.ElementRefID: + return rels[i].RefA.ElementRefID < rels[j].RefA.ElementRefID + case rels[i].RefB.ElementRefID != rels[j].RefB.ElementRefID: + return rels[i].RefB.ElementRefID < rels[j].RefB.ElementRefID + default: + return rels[i].Relationship < rels[j].Relationship + } + }) } -func GetLicense(p ftypes.Package) string { - if len(p.Licenses) == 0 { - return noneField - } - - license := strings.Join(lo.Map(p.Licenses, func(license string, index int) string { - // e.g. GPL-3.0-with-autoconf-exception - license = strings.ReplaceAll(license, "-with-", " WITH ") - license = strings.ReplaceAll(license, "-WITH-", " WITH ") +func sortFiles(files []*spdx.File) { + sort.Slice(files, func(i, j int) bool { + switch { + case files[i].FileName != files[j].FileName: + return files[i].FileName < files[j].FileName + default: + return files[i].FileSPDXIdentifier < files[j].FileSPDXIdentifier + } + }) +} - return fmt.Sprintf("(%s)", license) - }), " AND ") - s, err := expression.Normalize(license, licensing.Normalize, expression.NormalizeForSPDX) - if err != nil { - // Not fail on the invalid license - log.Logger.Warnf("Unable to marshal SPDX licenses %q", license) - return "" - } - return s +func elementID(elementType, pkgID string) spdx.ElementID { + return spdx.ElementID(fmt.Sprintf("%s-%s", elementType, pkgID)) } -func getDocumentNamespace(r types.Report, m *Marshaler) string { +func getDocumentNamespace(root *core.Component) string { return fmt.Sprintf("%s/%s/%s-%s", DocumentNamespace, - string(r.ArtifactType), - strings.ReplaceAll(strings.ReplaceAll(r.ArtifactName, "https://", ""), "http://", ""), // remove http(s):// prefix when scanning repos + string(root.Type), + strings.ReplaceAll(strings.ReplaceAll(root.Name, "https://", ""), "http://", ""), // remove http(s):// prefix when scanning repos uuid.New().String(), ) } @@ -487,40 +520,19 @@ func camelCase(inputUnderScoreStr string) (camelCase string) { return } -func getPackageDownloadLocation(t ftypes.ArtifactType, artifactName string) string { - location := noneField - // this field is used for git/mercurial/subversion/bazaar: - // https://spdx.github.io/spdx-spec/v2.2.2/package-information/#77-package-download-location-field - if t == ftypes.ArtifactRepository { - // Trivy currently only supports git repositories. Format examples: - // git+https://git.myproject.org/MyProject.git - // git+http://git.myproject.org/MyProject - location = fmt.Sprintf("git+%s", artifactName) - } - return location -} - -func digestToSpdxFileChecksum(d digest.Digest) []common.Checksum { - if d == "" { - return nil - } - - var alg spdx.ChecksumAlgorithm - switch d.Algorithm() { - case digest.SHA1: - alg = spdx.SHA1 - case digest.SHA256: - alg = spdx.SHA256 - case digest.MD5: - alg = spdx.MD5 - default: - return nil - } +func NormalizeLicense(licenses []string) string { + license := strings.Join(lo.Map(licenses, func(license string, index int) string { + // e.g. 
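// A standalone sketch of the deterministic ordering applied by sortRelationships
// above: relationships are sorted by RefA, then RefB, then the relationship
// type, so the same BOM always serializes identically. The relationship struct
// is a simplified stand-in for spdx.Relationship.
package main

import (
	"fmt"
	"sort"
)

type relationship struct {
	RefA, RefB, Type string
}

// sortRels orders relationships by RefA, RefB, then Type.
func sortRels(rels []relationship) {
	sort.Slice(rels, func(i, j int) bool {
		switch {
		case rels[i].RefA != rels[j].RefA:
			return rels[i].RefA < rels[j].RefA
		case rels[i].RefB != rels[j].RefB:
			return rels[i].RefB < rels[j].RefB
		default:
			return rels[i].Type < rels[j].Type
		}
	})
}

func main() {
	rels := []relationship{
		{"DOCUMENT", "ContainerImage-9396", "DESCRIBES"},
		{"Application-6922", "Package-b8d4", "CONTAINS"},
		{"Application-6922", "Package-3b51", "CONTAINS"},
	}
	sortRels(rels)
	for _, r := range rels {
		fmt.Println(r.RefA, r.Type, r.RefB)
	}
}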
GPL-3.0-with-autoconf-exception + license = strings.ReplaceAll(license, "-with-", " WITH ") + license = strings.ReplaceAll(license, "-WITH-", " WITH ") - return []spdx.Checksum{ - { - Algorithm: alg, - Value: d.Encoded(), - }, + return fmt.Sprintf("(%s)", license) + }), " AND ") + s, err := expression.Normalize(license, licensing.Normalize, expression.NormalizeForSPDX) + if err != nil { + // Not fail on the invalid license + log.Logger.Warnf("Unable to marshal SPDX licenses %q", license) + return "" } + return s } diff --git a/pkg/sbom/spdx/marshal_test.go b/pkg/sbom/spdx/marshal_test.go index a66a1d5ee46c..c7757de8ca81 100644 --- a/pkg/sbom/spdx/marshal_test.go +++ b/pkg/sbom/spdx/marshal_test.go @@ -2,6 +2,7 @@ package spdx_test import ( "context" + "github.com/aquasecurity/trivy/pkg/sbom/core" "github.com/package-url/packageurl-go" "hash/fnv" "testing" @@ -144,7 +145,7 @@ func TestMarshaler_Marshal(t *testing.T) { DataLicense: spdx.DataLicense, SPDXIdentifier: "DOCUMENT", DocumentName: "rails:latest", - DocumentNamespace: "http://aquasecurity.github.io/trivy/container_image/rails:latest-3ff14136-e09f-4df9-80ea-000000000001", + DocumentNamespace: "http://aquasecurity.github.io/trivy/container_image/rails:latest-3ff14136-e09f-4df9-80ea-000000000009", CreationInfo: &spdx.CreationInfo{ Creators: []common.Creator{ { @@ -160,12 +161,56 @@ func TestMarshaler_Marshal(t *testing.T) { }, Packages: []*spdx.Package{ { - PackageSPDXIdentifier: spdx.ElementID("Package-eb0263038c3b445b"), + PackageSPDXIdentifier: spdx.ElementID("Application-9f48cdd13858abaf"), + PackageDownloadLocation: "NONE", + PackageName: "app/Gemfile.lock", + PrimaryPackagePurpose: tspdx.PackagePurposeApplication, + PackageAttributionTexts: []string{ + "Class: lang-pkgs", + "Type: bundler", + }, + }, + { + PackageSPDXIdentifier: spdx.ElementID("Application-692290f4b2235359"), + PackageDownloadLocation: "NONE", + PackageName: "app/subproject/Gemfile.lock", + PrimaryPackagePurpose: tspdx.PackagePurposeApplication, + PackageAttributionTexts: []string{ + "Class: lang-pkgs", + "Type: bundler", + }, + }, + { + PackageSPDXIdentifier: spdx.ElementID("ContainerImage-9396d894cd0cb6cb"), + PackageDownloadLocation: "NONE", + PackageName: "rails:latest", + PackageExternalReferences: []*spdx.PackageExternalReference{ + { + Category: tspdx.CategoryPackageManager, + RefType: tspdx.RefTypePurl, + Locator: "pkg:oci/rails@sha256%3Aa27fd8080b517143cbbbab9dfb7c8571c40d67d534bbdee55bd6c473f432b177?arch=arm64&repository_url=index.docker.io%2Flibrary%2Frails", + }, + }, + PackageAttributionTexts: []string{ + "DiffID: sha256:d871dadfb37b53ef1ca45be04fc527562b91989991a8f545345ae3be0b93f92a", + "ImageID: sha256:5d0da3dc976460b72c77d94c8a1ad043720b0416bfc16c52c45d4847e53fadb6", + "RepoDigest: rails@sha256:a27fd8080b517143cbbbab9dfb7c8571c40d67d534bbdee55bd6c473f432b177", + "RepoTag: rails:latest", + "SchemaVersion: 2", + "Size: 1024", + }, + PrimaryPackagePurpose: tspdx.PackagePurposeContainer, + }, + { + PackageSPDXIdentifier: spdx.ElementID("Package-b8d4663e6d412e7"), PackageDownloadLocation: "NONE", PackageName: "actioncontroller", PackageVersion: "7.0.1", PackageLicenseConcluded: "NONE", PackageLicenseDeclared: "NONE", + PackageAttributionTexts: []string{ + "PkgType: bundler", + }, PackageExternalReferences: []*spdx.PackageExternalReference{ { Category: tspdx.CategoryPackageManager, @@ -175,14 +220,39 @@ func TestMarshaler_Marshal(t *testing.T) { }, PrimaryPackagePurpose: tspdx.PackagePurposeLibrary, PackageSupplier: &spdx.Supplier{Supplier: 
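// A standalone sketch of the pre-processing half of NormalizeLicense above: each
// license is wrapped in parentheses, the "-with-" infix becomes the SPDX WITH
// operator, and the entries are joined with AND. The final expression.Normalize
// step (mapping names such as GPLv2+ to SPDX identifiers) is omitted here.
package main

import (
	"fmt"
	"strings"
)

// joinLicenses pre-processes raw license strings into a combined expression.
func joinLicenses(licenses []string) string {
	parts := make([]string, 0, len(licenses))
	for _, l := range licenses {
		// e.g. GPL-3.0-with-autoconf-exception -> GPL-3.0 WITH autoconf-exception
		l = strings.ReplaceAll(l, "-with-", " WITH ")
		l = strings.ReplaceAll(l, "-WITH-", " WITH ")
		parts = append(parts, fmt.Sprintf("(%s)", l))
	}
	return strings.Join(parts, " AND ")
}

func main() {
	fmt.Println(joinLicenses([]string{"GPL-3.0-with-autoconf-exception", "MIT"}))
	// (GPL-3.0 WITH autoconf-exception) AND (MIT)
}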
tspdx.PackageSupplierNoAssertion}, + PackageSourceInfo: "package found in: app/subproject/Gemfile.lock", + }, + { + PackageSPDXIdentifier: spdx.ElementID("Package-3b51e821f6796568"), + PackageDownloadLocation: "NONE", + PackageName: "actionpack", + PackageVersion: "7.0.1", + PackageLicenseConcluded: "NONE", + PackageLicenseDeclared: "NONE", + PackageAttributionTexts: []string{ + "PkgType: bundler", + }, + PackageExternalReferences: []*spdx.PackageExternalReference{ + { + Category: tspdx.CategoryPackageManager, + RefType: tspdx.RefTypePurl, + Locator: "pkg:gem/actionpack@7.0.1", + }, + }, + PrimaryPackagePurpose: tspdx.PackagePurposeLibrary, + PackageSupplier: &spdx.Supplier{Supplier: tspdx.PackageSupplierNoAssertion}, + PackageSourceInfo: "package found in: app/subproject/Gemfile.lock", }, { - PackageSPDXIdentifier: spdx.ElementID("Package-826226d056ff30c0"), + PackageSPDXIdentifier: spdx.ElementID("Package-fb5630bc7d55a21c"), PackageDownloadLocation: "NONE", PackageName: "actionpack", PackageVersion: "7.0.1", PackageLicenseConcluded: "NONE", PackageLicenseDeclared: "NONE", + PackageAttributionTexts: []string{ + "PkgType: bundler", + }, PackageExternalReferences: []*spdx.PackageExternalReference{ { Category: tspdx.CategoryPackageManager, @@ -192,14 +262,18 @@ func TestMarshaler_Marshal(t *testing.T) { }, PrimaryPackagePurpose: tspdx.PackagePurposeLibrary, PackageSupplier: &spdx.Supplier{Supplier: tspdx.PackageSupplierNoAssertion}, + PackageSourceInfo: "package found in: app/Gemfile.lock", }, { - PackageSPDXIdentifier: spdx.ElementID("Package-fd0dc3cf913d5bc3"), + PackageSPDXIdentifier: spdx.ElementID("Package-5d43902b18ed2e2c"), PackageDownloadLocation: "NONE", PackageName: "binutils", PackageVersion: "2.30-93.el8", PackageLicenseConcluded: "GPL-3.0-or-later", PackageLicenseDeclared: "GPL-3.0-or-later", + PackageAttributionTexts: []string{ + "PkgType: centos", + }, PackageSupplier: &spdx.Supplier{ SupplierType: tspdx.PackageSupplierOrganization, Supplier: "CentOS", @@ -221,87 +295,56 @@ func TestMarshaler_Marshal(t *testing.T) { }, }, { - PackageSPDXIdentifier: spdx.ElementID("Application-73c871d73f3c8248"), - PackageDownloadLocation: "NONE", - PackageName: "bundler", - PackageSourceInfo: "app/subproject/Gemfile.lock", - PrimaryPackagePurpose: tspdx.PackagePurposeApplication, - }, - { - PackageSPDXIdentifier: spdx.ElementID("Application-c3fac92c1ac0a9fa"), - PackageDownloadLocation: "NONE", - PackageName: "bundler", - PackageSourceInfo: "app/Gemfile.lock", - PrimaryPackagePurpose: tspdx.PackagePurposeApplication, - }, - { - PackageSPDXIdentifier: spdx.ElementID("OperatingSystem-197f9a00ebcb51f0"), + PackageSPDXIdentifier: spdx.ElementID("OperatingSystem-20f7fa3049cc748c"), PackageDownloadLocation: "NONE", PackageName: "centos", PackageVersion: "8.3.2011", PrimaryPackagePurpose: tspdx.PackagePurposeOS, - }, - { - PackageSPDXIdentifier: spdx.ElementID("ContainerImage-9396d894cd0cb6cb"), - PackageDownloadLocation: "NONE", - PackageName: "rails:latest", - PackageExternalReferences: []*spdx.PackageExternalReference{ - { - Category: tspdx.CategoryPackageManager, - RefType: tspdx.RefTypePurl, - Locator: "pkg:oci/rails@sha256%3Aa27fd8080b517143cbbbab9dfb7c8571c40d67d534bbdee55bd6c473f432b177?arch=arm64&repository_url=index.docker.io%2Flibrary%2Frails", - }, - }, PackageAttributionTexts: []string{ - "SchemaVersion: 2", - "ImageID: sha256:5d0da3dc976460b72c77d94c8a1ad043720b0416bfc16c52c45d4847e53fadb6", - "Size: 1024", - "RepoDigest: 
rails@sha256:a27fd8080b517143cbbbab9dfb7c8571c40d67d534bbdee55bd6c473f432b177", - "DiffID: sha256:d871dadfb37b53ef1ca45be04fc527562b91989991a8f545345ae3be0b93f92a", - "RepoTag: rails:latest", + "Class: os-pkgs", + "Type: centos", }, - PrimaryPackagePurpose: tspdx.PackagePurposeContainer, }, }, Relationships: []*spdx.Relationship{ { - RefA: spdx.DocElementID{ElementRefID: "DOCUMENT"}, - RefB: spdx.DocElementID{ElementRefID: "ContainerImage-9396d894cd0cb6cb"}, - Relationship: "DESCRIBES", + RefA: spdx.DocElementID{ElementRefID: "Application-692290f4b2235359"}, + RefB: spdx.DocElementID{ElementRefID: "Package-3b51e821f6796568"}, + Relationship: "CONTAINS", }, { - RefA: spdx.DocElementID{ElementRefID: "ContainerImage-9396d894cd0cb6cb"}, - RefB: spdx.DocElementID{ElementRefID: "OperatingSystem-197f9a00ebcb51f0"}, + RefA: spdx.DocElementID{ElementRefID: "Application-692290f4b2235359"}, + RefB: spdx.DocElementID{ElementRefID: "Package-b8d4663e6d412e7"}, Relationship: "CONTAINS", }, { - RefA: spdx.DocElementID{ElementRefID: "OperatingSystem-197f9a00ebcb51f0"}, - RefB: spdx.DocElementID{ElementRefID: "Package-fd0dc3cf913d5bc3"}, + RefA: spdx.DocElementID{ElementRefID: "Application-9f48cdd13858abaf"}, + RefB: spdx.DocElementID{ElementRefID: "Package-fb5630bc7d55a21c"}, Relationship: "CONTAINS", }, { RefA: spdx.DocElementID{ElementRefID: "ContainerImage-9396d894cd0cb6cb"}, - RefB: spdx.DocElementID{ElementRefID: "Application-73c871d73f3c8248"}, + RefB: spdx.DocElementID{ElementRefID: "Application-692290f4b2235359"}, Relationship: "CONTAINS", }, { - RefA: spdx.DocElementID{ElementRefID: "Application-73c871d73f3c8248"}, - RefB: spdx.DocElementID{ElementRefID: "Package-826226d056ff30c0"}, + RefA: spdx.DocElementID{ElementRefID: "ContainerImage-9396d894cd0cb6cb"}, + RefB: spdx.DocElementID{ElementRefID: "Application-9f48cdd13858abaf"}, Relationship: "CONTAINS", }, { - RefA: spdx.DocElementID{ElementRefID: "Application-73c871d73f3c8248"}, - RefB: spdx.DocElementID{ElementRefID: "Package-eb0263038c3b445b"}, + RefA: spdx.DocElementID{ElementRefID: "ContainerImage-9396d894cd0cb6cb"}, + RefB: spdx.DocElementID{ElementRefID: "OperatingSystem-20f7fa3049cc748c"}, Relationship: "CONTAINS", }, { - RefA: spdx.DocElementID{ElementRefID: "ContainerImage-9396d894cd0cb6cb"}, - RefB: spdx.DocElementID{ElementRefID: "Application-c3fac92c1ac0a9fa"}, - Relationship: "CONTAINS", + RefA: spdx.DocElementID{ElementRefID: "DOCUMENT"}, + RefB: spdx.DocElementID{ElementRefID: "ContainerImage-9396d894cd0cb6cb"}, + Relationship: "DESCRIBES", }, { - RefA: spdx.DocElementID{ElementRefID: "Application-c3fac92c1ac0a9fa"}, - RefB: spdx.DocElementID{ElementRefID: "Package-826226d056ff30c0"}, + RefA: spdx.DocElementID{ElementRefID: "OperatingSystem-20f7fa3049cc748c"}, + RefB: spdx.DocElementID{ElementRefID: "Package-5d43902b18ed2e2c"}, Relationship: "CONTAINS", }, }, @@ -420,7 +463,7 @@ func TestMarshaler_Marshal(t *testing.T) { DataLicense: spdx.DataLicense, SPDXIdentifier: "DOCUMENT", DocumentName: "centos:latest", - DocumentNamespace: "http://aquasecurity.github.io/trivy/container_image/centos:latest-3ff14136-e09f-4df9-80ea-000000000001", + DocumentNamespace: "http://aquasecurity.github.io/trivy/container_image/centos:latest-3ff14136-e09f-4df9-80ea-000000000006", CreationInfo: &spdx.CreationInfo{ Creators: []common.Creator{ { @@ -436,12 +479,27 @@ func TestMarshaler_Marshal(t *testing.T) { }, Packages: []*spdx.Package{ { - PackageSPDXIdentifier: spdx.ElementID("Package-d8dccb186bafaf37"), + PackageName: "centos:latest", + 
PackageSPDXIdentifier: "ContainerImage-413bfede37ad01fc", + PackageDownloadLocation: "NONE", + PackageAttributionTexts: []string{ + "ImageID: sha256:5d0da3dc976460b72c77d94c8a1ad043720b0416bfc16c52c45d4847e53fadb6", + "RepoTag: centos:latest", + "SchemaVersion: 2", + "Size: 1024", + }, + PrimaryPackagePurpose: tspdx.PackagePurposeContainer, + }, + { + PackageSPDXIdentifier: spdx.ElementID("Package-40c4059fe08523bf"), PackageDownloadLocation: "NONE", PackageName: "acl", PackageVersion: "1:2.2.53-1.el8", PackageLicenseConcluded: "GPL-2.0-or-later", PackageLicenseDeclared: "GPL-2.0-or-later", + PackageAttributionTexts: []string{ + "PkgType: centos", + }, PackageExternalReferences: []*spdx.PackageExternalReference{ { Category: tspdx.CategoryPackageManager, @@ -460,7 +518,7 @@ func TestMarshaler_Marshal(t *testing.T) { }, }, { - PackageSPDXIdentifier: spdx.ElementID("Package-13fe667a0805e6b7"), + PackageSPDXIdentifier: spdx.ElementID("Package-69f68dd639314edd"), PackageDownloadLocation: "NONE", PackageName: "actionpack", PackageVersion: "7.0.1", @@ -475,6 +533,7 @@ func TestMarshaler_Marshal(t *testing.T) { }, PackageAttributionTexts: []string{ "LayerDiffID: sha256:ccb64cf0b7ba2e50741d0b64cae324eb5de3b1e2f580bbf177e721b67df38488", + "PkgType: gemspec", }, PrimaryPackagePurpose: tspdx.PackagePurposeLibrary, PackageSupplier: &spdx.Supplier{Supplier: tspdx.PackageSupplierNoAssertion}, @@ -484,7 +543,7 @@ func TestMarshaler_Marshal(t *testing.T) { }, }, { - PackageSPDXIdentifier: spdx.ElementID("Package-d5443dbcbba0dbd4"), + PackageSPDXIdentifier: spdx.ElementID("Package-da2cda24d2ecbfe6"), PackageDownloadLocation: "NONE", PackageName: "actionpack", PackageVersion: "7.0.1", @@ -499,6 +558,7 @@ func TestMarshaler_Marshal(t *testing.T) { }, PackageAttributionTexts: []string{ "LayerDiffID: sha256:ccb64cf0b7ba2e50741d0b64cae324eb5de3b1e2f580bbf177e721b67df38488", + "PkgType: gemspec", }, PrimaryPackagePurpose: tspdx.PackagePurposeLibrary, PackageSupplier: &spdx.Supplier{Supplier: tspdx.PackageSupplierNoAssertion}, @@ -508,93 +568,73 @@ func TestMarshaler_Marshal(t *testing.T) { }, }, { - PackageSPDXIdentifier: spdx.ElementID("OperatingSystem-197f9a00ebcb51f0"), + PackageSPDXIdentifier: spdx.ElementID("OperatingSystem-20f7fa3049cc748c"), PackageDownloadLocation: "NONE", PackageName: "centos", PackageVersion: "8.3.2011", PrimaryPackagePurpose: tspdx.PackagePurposeOS, - }, - { - PackageName: "centos:latest", - PackageSPDXIdentifier: "ContainerImage-413bfede37ad01fc", - PackageDownloadLocation: "NONE", PackageAttributionTexts: []string{ - "SchemaVersion: 2", - "ImageID: sha256:5d0da3dc976460b72c77d94c8a1ad043720b0416bfc16c52c45d4847e53fadb6", - "Size: 1024", - "RepoTag: centos:latest", + "Class: os-pkgs", + "Type: centos", }, - PrimaryPackagePurpose: tspdx.PackagePurposeContainer, - }, - { - PackageSPDXIdentifier: spdx.ElementID("Application-441a648f2aeeee72"), - PackageDownloadLocation: "NONE", - PackageName: "gemspec", - PackageSourceInfo: "Ruby", - PrimaryPackagePurpose: tspdx.PackagePurposeApplication, }, }, Files: []*spdx.File{ { - FileSPDXIdentifier: "File-6a540784b0dc6d55", - FileName: "tools/project-john/specifications/actionpack.gemspec", + FileSPDXIdentifier: "File-fa42187221d0d0a8", + FileName: "tools/project-doe/specifications/actionpack.gemspec", Checksums: []spdx.Checksum{ { Algorithm: spdx.SHA1, - Value: "d2f9f9aed5161f6e4116a3f9573f41cd832f137c", + Value: "413f98442c83808042b5d1d2611a346b999bdca5", }, }, }, { - FileSPDXIdentifier: "File-fa42187221d0d0a8", - FileName: 
"tools/project-doe/specifications/actionpack.gemspec", + FileSPDXIdentifier: "File-6a540784b0dc6d55", + FileName: "tools/project-john/specifications/actionpack.gemspec", Checksums: []spdx.Checksum{ { Algorithm: spdx.SHA1, - Value: "413f98442c83808042b5d1d2611a346b999bdca5", + Value: "d2f9f9aed5161f6e4116a3f9573f41cd832f137c", }, }, }, }, Relationships: []*spdx.Relationship{ - { - RefA: spdx.DocElementID{ElementRefID: "DOCUMENT"}, - RefB: spdx.DocElementID{ElementRefID: "ContainerImage-413bfede37ad01fc"}, - Relationship: "DESCRIBES", - }, { RefA: spdx.DocElementID{ElementRefID: "ContainerImage-413bfede37ad01fc"}, - RefB: spdx.DocElementID{ElementRefID: "OperatingSystem-197f9a00ebcb51f0"}, + RefB: spdx.DocElementID{ElementRefID: "OperatingSystem-20f7fa3049cc748c"}, Relationship: "CONTAINS", }, { - RefA: spdx.DocElementID{ElementRefID: "OperatingSystem-197f9a00ebcb51f0"}, - RefB: spdx.DocElementID{ElementRefID: "Package-d8dccb186bafaf37"}, + RefA: spdx.DocElementID{ElementRefID: "ContainerImage-413bfede37ad01fc"}, + RefB: spdx.DocElementID{ElementRefID: "Package-69f68dd639314edd"}, Relationship: "CONTAINS", }, { RefA: spdx.DocElementID{ElementRefID: "ContainerImage-413bfede37ad01fc"}, - RefB: spdx.DocElementID{ElementRefID: "Application-441a648f2aeeee72"}, + RefB: spdx.DocElementID{ElementRefID: "Package-da2cda24d2ecbfe6"}, Relationship: "CONTAINS", }, { - RefA: spdx.DocElementID{ElementRefID: "Application-441a648f2aeeee72"}, - RefB: spdx.DocElementID{ElementRefID: "Package-d5443dbcbba0dbd4"}, - Relationship: "CONTAINS", + RefA: spdx.DocElementID{ElementRefID: "DOCUMENT"}, + RefB: spdx.DocElementID{ElementRefID: "ContainerImage-413bfede37ad01fc"}, + Relationship: "DESCRIBES", }, { - RefA: spdx.DocElementID{ElementRefID: "Package-d5443dbcbba0dbd4"}, - RefB: spdx.DocElementID{ElementRefID: "File-6a540784b0dc6d55"}, + RefA: spdx.DocElementID{ElementRefID: "OperatingSystem-20f7fa3049cc748c"}, + RefB: spdx.DocElementID{ElementRefID: "Package-40c4059fe08523bf"}, Relationship: "CONTAINS", }, { - RefA: spdx.DocElementID{ElementRefID: "Application-441a648f2aeeee72"}, - RefB: spdx.DocElementID{ElementRefID: "Package-13fe667a0805e6b7"}, + RefA: spdx.DocElementID{ElementRefID: "Package-69f68dd639314edd"}, + RefB: spdx.DocElementID{ElementRefID: "File-fa42187221d0d0a8"}, Relationship: "CONTAINS", }, { - RefA: spdx.DocElementID{ElementRefID: "Package-13fe667a0805e6b7"}, - RefB: spdx.DocElementID{ElementRefID: "File-fa42187221d0d0a8"}, + RefA: spdx.DocElementID{ElementRefID: "Package-da2cda24d2ecbfe6"}, + RefB: spdx.DocElementID{ElementRefID: "File-6a540784b0dc6d55"}, Relationship: "CONTAINS", }, }, @@ -629,6 +669,26 @@ func TestMarshaler_Marshal(t *testing.T) { }, }, }, + { + Target: "pom.xml", + Class: types.ClassLangPkg, + Type: ftypes.Pom, + Packages: []ftypes.Package{ + { + ID: "com.example:example:1.0.0", + Name: "com.example:example", + Version: "1.0.0", + Identifier: ftypes.PkgIdentifier{ + PURL: &packageurl.PackageURL{ + Type: packageurl.TypeMaven, + Namespace: "com.example", + Name: "example", + Version: "1.0.0", + }, + }, + }, + }, + }, }, }, wantSBOM: &spdx.Document{ @@ -636,7 +696,7 @@ func TestMarshaler_Marshal(t *testing.T) { DataLicense: spdx.DataLicense, SPDXIdentifier: "DOCUMENT", DocumentName: "masahiro331/CVE-2021-41098", - DocumentNamespace: "http://aquasecurity.github.io/trivy/filesystem/masahiro331/CVE-2021-41098-3ff14136-e09f-4df9-80ea-000000000001", + DocumentNamespace: "http://aquasecurity.github.io/trivy/filesystem/masahiro331/CVE-2021-41098-3ff14136-e09f-4df9-80ea-000000000006", 
CreationInfo: &spdx.CreationInfo{ Creators: []common.Creator{ { @@ -652,7 +712,27 @@ func TestMarshaler_Marshal(t *testing.T) { }, Packages: []*spdx.Package{ { - PackageSPDXIdentifier: spdx.ElementID("Package-3da61e86d0530402"), + PackageSPDXIdentifier: spdx.ElementID("Application-ed046c4a6b4da30f"), + PackageDownloadLocation: "NONE", + PackageName: "Gemfile.lock", + PrimaryPackagePurpose: tspdx.PackagePurposeApplication, + PackageAttributionTexts: []string{ + "Class: lang-pkgs", + "Type: bundler", + }, + }, + { + PackageSPDXIdentifier: spdx.ElementID("Application-800d9e6e0f88ab3a"), + PackageDownloadLocation: "NONE", + PackageName: "pom.xml", + PrimaryPackagePurpose: tspdx.PackagePurposeApplication, + PackageAttributionTexts: []string{ + "Class: lang-pkgs", + "Type: pom", + }, + }, + { + PackageSPDXIdentifier: spdx.ElementID("Package-e78eaf94802a53dc"), PackageDownloadLocation: "NONE", PackageName: "actioncable", PackageVersion: "6.1.4.1", @@ -667,13 +747,32 @@ func TestMarshaler_Marshal(t *testing.T) { }, PrimaryPackagePurpose: tspdx.PackagePurposeLibrary, PackageSupplier: &spdx.Supplier{Supplier: tspdx.PackageSupplierNoAssertion}, + PackageSourceInfo: "package found in: Gemfile.lock", + PackageAttributionTexts: []string{ + "PkgType: bundler", + }, }, { - PackageSPDXIdentifier: spdx.ElementID("Application-9dd4a4ba7077cc5a"), + PackageSPDXIdentifier: spdx.ElementID("Package-69cd7625c68537c7"), PackageDownloadLocation: "NONE", - PackageName: "bundler", - PackageSourceInfo: "Gemfile.lock", - PrimaryPackagePurpose: tspdx.PackagePurposeApplication, + PackageName: "com.example:example", + PackageVersion: "1.0.0", + PackageLicenseConcluded: "NONE", + PackageLicenseDeclared: "NONE", + PackageExternalReferences: []*spdx.PackageExternalReference{ + { + Category: tspdx.CategoryPackageManager, + RefType: tspdx.RefTypePurl, + Locator: "pkg:maven/com.example/example@1.0.0", + }, + }, + PrimaryPackagePurpose: tspdx.PackagePurposeLibrary, + PackageSupplier: &spdx.Supplier{Supplier: tspdx.PackageSupplierNoAssertion}, + PackageSourceInfo: "package found in: pom.xml", + PackageAttributionTexts: []string{ + "PkgID: com.example:example:1.0.0", + "PkgType: pom", + }, }, { PackageSPDXIdentifier: spdx.ElementID("Filesystem-5af0f1f08c20909a"), @@ -686,6 +785,16 @@ func TestMarshaler_Marshal(t *testing.T) { }, }, Relationships: []*spdx.Relationship{ + { + RefA: spdx.DocElementID{ElementRefID: "Application-800d9e6e0f88ab3a"}, + RefB: spdx.DocElementID{ElementRefID: "Package-69cd7625c68537c7"}, + Relationship: "CONTAINS", + }, + { + RefA: spdx.DocElementID{ElementRefID: "Application-ed046c4a6b4da30f"}, + RefB: spdx.DocElementID{ElementRefID: "Package-e78eaf94802a53dc"}, + Relationship: "CONTAINS", + }, { RefA: spdx.DocElementID{ElementRefID: "DOCUMENT"}, RefB: spdx.DocElementID{ElementRefID: "Filesystem-5af0f1f08c20909a"}, @@ -693,12 +802,12 @@ func TestMarshaler_Marshal(t *testing.T) { }, { RefA: spdx.DocElementID{ElementRefID: "Filesystem-5af0f1f08c20909a"}, - RefB: spdx.DocElementID{ElementRefID: "Application-9dd4a4ba7077cc5a"}, + RefB: spdx.DocElementID{ElementRefID: "Application-800d9e6e0f88ab3a"}, Relationship: "CONTAINS", }, { - RefA: spdx.DocElementID{ElementRefID: "Application-9dd4a4ba7077cc5a"}, - RefB: spdx.DocElementID{ElementRefID: "Package-3da61e86d0530402"}, + RefA: spdx.DocElementID{ElementRefID: "Filesystem-5af0f1f08c20909a"}, + RefB: spdx.DocElementID{ElementRefID: "Application-ed046c4a6b4da30f"}, Relationship: "CONTAINS", }, }, @@ -730,6 +839,7 @@ func TestMarshaler_Marshal(t *testing.T) { Layer: 
ftypes.Layer{ DiffID: "sha256:661c3fd3cc16b34c070f3620ca6b03b6adac150f9a7e5d0e3c707a159990f88e", }, + Digest: "sha256:a5efa82f08774597165e8c1a102d45d0406913b74c184883ac91f409ae26009d", FilePath: "usr/local/lib/ruby/gems/3.1.0/gems/typeprof-0.21.1/vscode/package.json", }, }, @@ -741,7 +851,7 @@ func TestMarshaler_Marshal(t *testing.T) { DataLicense: spdx.DataLicense, SPDXIdentifier: "DOCUMENT", DocumentName: "http://test-aggregate", - DocumentNamespace: "http://aquasecurity.github.io/trivy/repository/test-aggregate-3ff14136-e09f-4df9-80ea-000000000001", + DocumentNamespace: "http://aquasecurity.github.io/trivy/repository/test-aggregate-3ff14136-e09f-4df9-80ea-000000000003", CreationInfo: &spdx.CreationInfo{ Creators: []common.Creator{ { @@ -757,23 +867,7 @@ func TestMarshaler_Marshal(t *testing.T) { }, Packages: []*spdx.Package{ { - PackageName: "http://test-aggregate", - PackageSPDXIdentifier: "Repository-1a78857c1a6a759e", - PackageDownloadLocation: "git+http://test-aggregate", - PackageAttributionTexts: []string{ - "SchemaVersion: 2", - }, - PrimaryPackagePurpose: tspdx.PackagePurposeSource, - }, - { - PackageSPDXIdentifier: "Application-24f8a80152e2c0fc", - PackageDownloadLocation: "git+http://test-aggregate", - PackageName: "node-pkg", - PackageSourceInfo: "Node.js", - PrimaryPackagePurpose: tspdx.PackagePurposeApplication, - }, - { - PackageSPDXIdentifier: spdx.ElementID("Package-daedb173cfd43058"), + PackageSPDXIdentifier: spdx.ElementID("Package-52b8e939bac2d133"), PackageDownloadLocation: "git+http://test-aggregate", PackageName: "ruby-typeprof", PackageVersion: "0.20.1", @@ -788,6 +882,7 @@ func TestMarshaler_Marshal(t *testing.T) { }, PackageAttributionTexts: []string{ "LayerDiffID: sha256:661c3fd3cc16b34c070f3620ca6b03b6adac150f9a7e5d0e3c707a159990f88e", + "PkgType: node-pkg", }, PrimaryPackagePurpose: tspdx.PackagePurposeLibrary, PackageSupplier: &spdx.Supplier{Supplier: tspdx.PackageSupplierNoAssertion}, @@ -796,11 +891,26 @@ func TestMarshaler_Marshal(t *testing.T) { Value: "da39a3ee5e6b4b0d3255bfef95601890afd80709", }, }, + { + PackageSPDXIdentifier: "Repository-1a78857c1a6a759e", + PackageName: "http://test-aggregate", + PackageDownloadLocation: "git+http://test-aggregate", + PackageAttributionTexts: []string{ + "SchemaVersion: 2", + }, + PrimaryPackagePurpose: tspdx.PackagePurposeSource, + }, }, Files: []*spdx.File{ { FileName: "usr/local/lib/ruby/gems/3.1.0/gems/typeprof-0.21.1/vscode/package.json", FileSPDXIdentifier: "File-a52825a3e5bc6dfe", + Checksums: []common.Checksum{ + { + Algorithm: common.SHA256, + Value: "a5efa82f08774597165e8c1a102d45d0406913b74c184883ac91f409ae26009d", + }, + }, }, }, Relationships: []*spdx.Relationship{ @@ -810,18 +920,13 @@ func TestMarshaler_Marshal(t *testing.T) { Relationship: "DESCRIBES", }, { - RefA: spdx.DocElementID{ElementRefID: "Repository-1a78857c1a6a759e"}, - RefB: spdx.DocElementID{ElementRefID: "Application-24f8a80152e2c0fc"}, - Relationship: "CONTAINS", - }, - { - RefA: spdx.DocElementID{ElementRefID: "Application-24f8a80152e2c0fc"}, - RefB: spdx.DocElementID{ElementRefID: "Package-daedb173cfd43058"}, + RefA: spdx.DocElementID{ElementRefID: "Package-52b8e939bac2d133"}, + RefB: spdx.DocElementID{ElementRefID: "File-a52825a3e5bc6dfe"}, Relationship: "CONTAINS", }, { - RefA: spdx.DocElementID{ElementRefID: "Package-daedb173cfd43058"}, - RefB: spdx.DocElementID{ElementRefID: "File-a52825a3e5bc6dfe"}, + RefA: spdx.DocElementID{ElementRefID: "Repository-1a78857c1a6a759e"}, + RefB: spdx.DocElementID{ElementRefID: 
"Package-52b8e939bac2d133"}, Relationship: "CONTAINS", }, }, @@ -840,7 +945,7 @@ func TestMarshaler_Marshal(t *testing.T) { DataLicense: spdx.DataLicense, SPDXIdentifier: "DOCUMENT", DocumentName: "empty/path", - DocumentNamespace: "http://aquasecurity.github.io/trivy/filesystem/empty/path-3ff14136-e09f-4df9-80ea-000000000001", + DocumentNamespace: "http://aquasecurity.github.io/trivy/filesystem/empty/path-3ff14136-e09f-4df9-80ea-000000000002", CreationInfo: &spdx.CreationInfo{ Creators: []common.Creator{ @@ -903,8 +1008,7 @@ func TestMarshaler_Marshal(t *testing.T) { DataLicense: spdx.DataLicense, SPDXIdentifier: "DOCUMENT", DocumentName: "secret", - DocumentNamespace: "http://aquasecurity.github.io/trivy/filesystem/secret-3ff14136-e09f-4df9-80ea-000000000001", - + DocumentNamespace: "http://aquasecurity.github.io/trivy/filesystem/secret-3ff14136-e09f-4df9-80ea-000000000002", CreationInfo: &spdx.CreationInfo{ Creators: []common.Creator{ { @@ -946,7 +1050,7 @@ func TestMarshaler_Marshal(t *testing.T) { ArtifactType: ftypes.ArtifactFilesystem, Results: types.Results{ { - Target: "artifact", + Target: "/usr/local/bin/test", Class: types.ClassLangPkg, Type: ftypes.GoBinary, Packages: []ftypes.Package{ @@ -975,7 +1079,7 @@ func TestMarshaler_Marshal(t *testing.T) { DataLicense: spdx.DataLicense, SPDXIdentifier: "DOCUMENT", DocumentName: "go-artifact", - DocumentNamespace: "http://aquasecurity.github.io/trivy/filesystem/go-artifact-3ff14136-e09f-4df9-80ea-000000000001", + DocumentNamespace: "http://aquasecurity.github.io/trivy/filesystem/go-artifact-3ff14136-e09f-4df9-80ea-000000000005", CreationInfo: &spdx.CreationInfo{ Creators: []common.Creator{ { @@ -991,7 +1095,17 @@ func TestMarshaler_Marshal(t *testing.T) { }, Packages: []*spdx.Package{ { - PackageSPDXIdentifier: spdx.ElementID("Package-9164ae38c5cdf815"), + PackageSPDXIdentifier: spdx.ElementID("Application-aab0f4e8cf174c67"), + PackageDownloadLocation: "NONE", + PackageName: "/usr/local/bin/test", + PrimaryPackagePurpose: tspdx.PackagePurposeApplication, + PackageAttributionTexts: []string{ + "Class: lang-pkgs", + "Type: gobinary", + }, + }, + { + PackageSPDXIdentifier: spdx.ElementID("Package-9a16e221e11f8a90"), PackageDownloadLocation: "NONE", PackageName: "./private_repos/cnrm.googlesource.com/cnrm/", PackageVersion: "(devel)", @@ -999,25 +1113,13 @@ func TestMarshaler_Marshal(t *testing.T) { PackageLicenseDeclared: "NONE", PrimaryPackagePurpose: tspdx.PackagePurposeLibrary, PackageSupplier: &spdx.Supplier{Supplier: tspdx.PackageSupplierNoAssertion}, - }, - { - PackageName: "go-artifact", - PackageSPDXIdentifier: "Filesystem-e340f27468b382be", - PackageDownloadLocation: "NONE", + PackageSourceInfo: "package found in: /usr/local/bin/test", PackageAttributionTexts: []string{ - "SchemaVersion: 2", + "PkgType: gobinary", }, - PrimaryPackagePurpose: tspdx.PackagePurposeSource, }, { - PackageSPDXIdentifier: spdx.ElementID("Application-6666b83a5d554671"), - PackageDownloadLocation: "NONE", - PackageName: "gobinary", - PackageSourceInfo: "artifact", - PrimaryPackagePurpose: tspdx.PackagePurposeApplication, - }, - { - PackageSPDXIdentifier: spdx.ElementID("Package-8451f2bc8e1f45aa"), + PackageSPDXIdentifier: spdx.ElementID("Package-b9b7ae633941e083"), PackageDownloadLocation: "NONE", PackageName: "golang.org/x/crypto", PackageVersion: "v0.0.1", @@ -1032,27 +1134,40 @@ func TestMarshaler_Marshal(t *testing.T) { }, PrimaryPackagePurpose: tspdx.PackagePurposeLibrary, PackageSupplier: &spdx.Supplier{Supplier: tspdx.PackageSupplierNoAssertion}, + 
PackageSourceInfo: "package found in: /usr/local/bin/test", + PackageAttributionTexts: []string{ + "PkgType: gobinary", + }, + }, + { + PackageName: "go-artifact", + PackageSPDXIdentifier: "Filesystem-e340f27468b382be", + PackageDownloadLocation: "NONE", + PackageAttributionTexts: []string{ + "SchemaVersion: 2", + }, + PrimaryPackagePurpose: tspdx.PackagePurposeSource, }, }, Relationships: []*spdx.Relationship{ { - RefA: spdx.DocElementID{ElementRefID: "DOCUMENT"}, - RefB: spdx.DocElementID{ElementRefID: "Filesystem-e340f27468b382be"}, - Relationship: "DESCRIBES", + RefA: spdx.DocElementID{ElementRefID: "Application-aab0f4e8cf174c67"}, + RefB: spdx.DocElementID{ElementRefID: "Package-9a16e221e11f8a90"}, + Relationship: "CONTAINS", }, { - RefA: spdx.DocElementID{ElementRefID: "Filesystem-e340f27468b382be"}, - RefB: spdx.DocElementID{ElementRefID: "Application-6666b83a5d554671"}, + RefA: spdx.DocElementID{ElementRefID: "Application-aab0f4e8cf174c67"}, + RefB: spdx.DocElementID{ElementRefID: "Package-b9b7ae633941e083"}, Relationship: "CONTAINS", }, { - RefA: spdx.DocElementID{ElementRefID: "Application-6666b83a5d554671"}, - RefB: spdx.DocElementID{ElementRefID: "Package-9164ae38c5cdf815"}, - Relationship: "CONTAINS", + RefA: spdx.DocElementID{ElementRefID: "DOCUMENT"}, + RefB: spdx.DocElementID{ElementRefID: "Filesystem-e340f27468b382be"}, + Relationship: "DESCRIBES", }, { - RefA: spdx.DocElementID{ElementRefID: "Application-6666b83a5d554671"}, - RefB: spdx.DocElementID{ElementRefID: "Package-8451f2bc8e1f45aa"}, + RefA: spdx.DocElementID{ElementRefID: "Filesystem-e340f27468b382be"}, + RefB: spdx.DocElementID{ElementRefID: "Application-aab0f4e8cf174c67"}, Relationship: "CONTAINS", }, }, @@ -1064,17 +1179,18 @@ func TestMarshaler_Marshal(t *testing.T) { t.Run(tc.name, func(t *testing.T) { // Fake function calculating the hash value h := fnv.New64() - hasher := func(v interface{}, format hashstructure.Format, opts *hashstructure.HashOptions) (uint64, error) { + hasher := func(v any, format hashstructure.Format, opts *hashstructure.HashOptions) (uint64, error) { h.Reset() var str string - switch v.(type) { - case ftypes.Package: - str = v.(ftypes.Package).Name + v.(ftypes.Package).FilePath + switch vv := v.(type) { + case *core.Component: + str = vv.Name + vv.Version + vv.SrcFile + for _, f := range vv.Files { + str += f.Path + } case string: - str = v.(string) - case *ftypes.OS: - str = v.(*ftypes.OS).Name + str = vv default: require.Failf(t, "unknown type", "%T", v) } @@ -1090,7 +1206,7 @@ func TestMarshaler_Marshal(t *testing.T) { uuid.SetFakeUUID(t, "3ff14136-e09f-4df9-80ea-%012d") marshaler := tspdx.NewMarshaler("0.38.1", tspdx.WithHasher(hasher)) - spdxDoc, err := marshaler.Marshal(ctx, tc.inputReport) + spdxDoc, err := marshaler.MarshalReport(ctx, tc.inputReport) require.NoError(t, err) assert.Equal(t, tc.wantSBOM, spdxDoc) @@ -1101,62 +1217,52 @@ func TestMarshaler_Marshal(t *testing.T) { func Test_GetLicense(t *testing.T) { tests := []struct { name string - input ftypes.Package + input []string want string }{ { name: "happy path", - input: ftypes.Package{ - Licenses: []string{ - "GPLv2+", - }, + input: []string{ + "GPLv2+", }, want: "GPL-2.0-or-later", }, { name: "happy path with multi license", - input: ftypes.Package{ - Licenses: []string{ - "GPLv2+", - "GPLv3+", - }, + input: []string{ + "GPLv2+", + "GPLv3+", }, want: "GPL-2.0-or-later AND GPL-3.0-or-later", }, { name: "happy path with OR operator", - input: ftypes.Package{ - Licenses: []string{ - "GPLv2+", - "LGPL 2.0 or GNU LESSER", 
- }, + input: []string{ + "GPLv2+", + "LGPL 2.0 or GNU LESSER", }, want: "GPL-2.0-or-later AND (LGPL-2.0-only OR LGPL-3.0-only)", }, { name: "happy path with AND operator", - input: ftypes.Package{ - Licenses: []string{ - "GPLv2+", - "LGPL 2.0 and GNU LESSER", - }, + input: []string{ + "GPLv2+", + "LGPL 2.0 and GNU LESSER", }, want: "GPL-2.0-or-later AND LGPL-2.0-only AND LGPL-3.0-only", }, { name: "happy path with WITH operator", - input: ftypes.Package{ - Licenses: []string{ - "AFL 2.0", - "AFL 3.0 with distribution exception", - }, + input: []string{ + "AFL 2.0", + "AFL 3.0 with distribution exception", }, want: "AFL-2.0 AND AFL-3.0 WITH distribution-exception", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - assert.Equalf(t, tt.want, tspdx.GetLicense(tt.input), "getLicense(%v)", tt.input) + assert.Equal(t, tt.want, tspdx.NormalizeLicense(tt.input)) }) } } diff --git a/pkg/sbom/spdx/testdata/happy/with-file-as-relationship-parent.json b/pkg/sbom/spdx/testdata/happy/with-file-as-relationship-parent.json new file mode 100644 index 000000000000..798e75d0a52a --- /dev/null +++ b/pkg/sbom/spdx/testdata/happy/with-file-as-relationship-parent.json @@ -0,0 +1,54 @@ +{ + "files": [ + { + "fileName": "./Modules/Microsoft.PowerShell.PSResourceGet/_manifest/spdx_2.2/manifest.spdx.json", + "SPDXID": "SPDXRef-File--Modules-Microsoft.PowerShell.PSResourceGet--manifest-spdx-2.2-manifest.spdx.json-2B9FB98F5CA97DC84FD382A8F8E68F663C003362", + "checksums": [ + { + "algorithm": "SHA256", + "checksumValue": "4201b0989938842ef8c11a006184e0b1466bd7f9bb2af61d89a4c8318d43466e" + }, + { + "algorithm": "SHA1", + "checksumValue": "2b9fb98f5ca97dc84fd382a8f8e68f663c003362" + } + ], + "licenseConcluded": "NOASSERTION", + "licenseInfoInFiles": [ + "NOASSERTION" + ], + "copyrightText": "NOASSERTION", + "fileTypes": [ + "SPDX" + ] + } + ], + "externalDocumentRefs": [], + "relationships": [ + { + "relationshipType": "DESCRIBES", + "relatedSpdxElement": "SPDXRef-RootPackage", + "spdxElementId": "SPDXRef-DOCUMENT" + }, + { + "relationshipType": "DESCRIBED_BY", + "relatedSpdxElement": "SPDXRef-DOCUMENT", + "spdxElementId": "SPDXRef-File--Modules-Microsoft.PowerShell.PSResourceGet--manifest-spdx-2.2-manifest.spdx.json-2B9FB98F5CA97DC84FD382A8F8E68F663C003362" + } + ], + "spdxVersion": "SPDX-2.2", + "dataLicense": "CC0-1.0", + "SPDXID": "SPDXRef-DOCUMENT", + "name": "PowerShell Linux Arm32 7.5.0-preview.2", + "documentNamespace": "https://sbom.microsoft/1:2QSF7qZlbE-F7QrUJlEo7g:pHp_nUFvDUijZ4LrJ4RhoQ/696:458654/PowerShell%20Linux%20Arm32:7.5.0-preview.2:pDkyTHXmgUOdzSXIq9CiqA", + "creationInfo": { + "created": "2024-02-22T00:43:53Z", + "creators": [ + "Organization: Microsoft", + "Tool: Microsoft.SBOMTool-2.2.3" + ] + }, + "documentDescribes": [ + "SPDXRef-RootPackage" + ] +} \ No newline at end of file diff --git a/pkg/sbom/spdx/testdata/sad/invalid-source-info.json b/pkg/sbom/spdx/testdata/sad/invalid-purl.json similarity index 92% rename from pkg/sbom/spdx/testdata/sad/invalid-source-info.json rename to pkg/sbom/spdx/testdata/sad/invalid-purl.json index 1c761c1f53fa..da87237d54b7 100644 --- a/pkg/sbom/spdx/testdata/sad/invalid-source-info.json +++ b/pkg/sbom/spdx/testdata/sad/invalid-purl.json @@ -27,13 +27,13 @@ "externalRefs": [ { "referenceCategory": "PACKAGE-MANAGER", - "referenceLocator": "pkg:apk/alpine/musl@1.2.3-r0?distro=3.16.0", + "referenceLocator": "pkg:invalid", "referenceType": "purl" } ], "filesAnalyzed": false, "name": "musl", - "sourceInfo": "built package from: invalid", + 
"sourceInfo": "built package from: musl", "versionInfo": "1.2.3-r0" } ], diff --git a/pkg/sbom/spdx/unmarshal.go b/pkg/sbom/spdx/unmarshal.go index 718bdd608886..bda18c16980a 100644 --- a/pkg/sbom/spdx/unmarshal.go +++ b/pkg/sbom/spdx/unmarshal.go @@ -2,13 +2,10 @@ package spdx import ( "bytes" - "errors" "fmt" "io" - "sort" "strings" - version "github.com/knqyf263/go-rpm-version" "github.com/package-url/packageurl-go" "github.com/samber/lo" "github.com/spdx/tools-golang/json" @@ -17,17 +14,14 @@ import ( "github.com/spdx/tools-golang/tagvalue" "golang.org/x/xerrors" - ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" - "github.com/aquasecurity/trivy/pkg/purl" - "github.com/aquasecurity/trivy/pkg/types" -) - -var ( - errUnknownPackageFormat = xerrors.New("unknown package format") + "github.com/aquasecurity/trivy/pkg/sbom/core" ) type SPDX struct { - *types.SBOM + *core.BOM + + trivySBOM bool + pkgFilePaths map[common.ElementID]string } func NewTVDecoder(r io.Reader) *TVDecoder { @@ -48,8 +42,7 @@ func (tv *TVDecoder) Decode(v interface{}) error { if !ok { return xerrors.Errorf("invalid struct type tag-value decoder needed SPDX struct") } - err = a.unmarshal(spdxDocument) - if err != nil { + if err = a.unmarshal(spdxDocument); err != nil { return xerrors.Errorf("failed to unmarshal spdx: %w", err) } @@ -57,292 +50,231 @@ func (tv *TVDecoder) Decode(v interface{}) error { } func (s *SPDX) UnmarshalJSON(b []byte) error { + if s.BOM == nil { + s.BOM = core.NewBOM(core.Options{}) + } + if s.pkgFilePaths == nil { + s.pkgFilePaths = make(map[common.ElementID]string) + } + spdxDocument, err := json.Read(bytes.NewReader(b)) if err != nil { return xerrors.Errorf("failed to load spdx json: %w", err) } - err = s.unmarshal(spdxDocument) - if err != nil { + + if err = s.unmarshal(spdxDocument); err != nil { return xerrors.Errorf("failed to unmarshal spdx: %w", err) } return nil } func (s *SPDX) unmarshal(spdxDocument *spdx.Document) error { - var osPkgs []ftypes.Package - apps := make(map[common.ElementID]*ftypes.Application) - packageSPDXIdentifierMap := createPackageSPDXIdentifierMap(spdxDocument.Packages) - packageFilePaths := getPackageFilePaths(spdxDocument) + s.trivySBOM = s.isTrivySBOM(spdxDocument) - // Hold packages that are not processed by relationships - orphanPkgs := createPackageSPDXIdentifierMap(spdxDocument.Packages) + // Parse files and find file paths for packages + s.parseFiles(spdxDocument) - relationships := lo.Filter(spdxDocument.Relationships, func(rel *spdx.Relationship, _ int) bool { - // Skip the DESCRIBES relationship. - return rel.Relationship != common.TypeRelationshipDescribe && rel.Relationship != "DESCRIBE" - }) + // Convert all SPDX packages into Trivy components + components, err := s.parsePackages(spdxDocument) + if err != nil { + return xerrors.Errorf("package parse error: %w", err) + } - // Package relationships would be as belows: - // - Root (container image, filesystem, etc.) - // - Operating System (debian 10) - // - OS package A - // - OS package B - // - Application 1 (package-lock.json) - // - Node.js package A - // - Node.js package B - // - Application 2 (Pipfile.lock) - // - Python package A - // - Python package B - for _, rel := range relationships { - pkgA := packageSPDXIdentifierMap[rel.RefA.ElementRefID] - pkgB := packageSPDXIdentifierMap[rel.RefB.ElementRefID] - - if pkgA == nil || pkgB == nil { - // Skip the missing pkg relationship. 
+ // Parse relationships and build the dependency graph + for _, rel := range spdxDocument.Relationships { + // Skip the DESCRIBES relationship. + if rel.Relationship == common.TypeRelationshipDescribe || rel.Relationship == "DESCRIBE" { continue } - switch { - // Relationship: root package => OS - case isOperatingSystem(pkgB.PackageSPDXIdentifier): - s.SBOM.Metadata.OS = parseOS(*pkgB) - delete(orphanPkgs, pkgB.PackageSPDXIdentifier) - // Relationship: OS => OS package - case isOperatingSystem(pkgA.PackageSPDXIdentifier): - pkg, _, err := parsePkg(*pkgB, packageFilePaths) - if errors.Is(err, errUnknownPackageFormat) { - continue - } else if err != nil { - return xerrors.Errorf("failed to parse os package: %w", err) - } - osPkgs = append(osPkgs, *pkg) - delete(orphanPkgs, pkgB.PackageSPDXIdentifier) - // Relationship: root package => application - case isApplication(pkgB.PackageSPDXIdentifier): - // pass - // Relationship: application => language-specific package - case isApplication(pkgA.PackageSPDXIdentifier): - app, ok := apps[pkgA.PackageSPDXIdentifier] - if !ok { - app = initApplication(*pkgA) - apps[pkgA.PackageSPDXIdentifier] = app - } - - lib, _, err := parsePkg(*pkgB, packageFilePaths) - if errors.Is(err, errUnknownPackageFormat) { - continue - } else if err != nil { - return xerrors.Errorf("failed to parse language-specific package: %w", err) - } - app.Libraries = append(app.Libraries, *lib) - - // They are no longer orphan packages - delete(orphanPkgs, pkgA.PackageSPDXIdentifier) - delete(orphanPkgs, pkgB.PackageSPDXIdentifier) + compA, ok := components[rel.RefA.ElementRefID] + if !ok { // Skip if parent is not Package + continue } - } - - // Fill OS packages - if len(osPkgs) > 0 { - s.Packages = []ftypes.PackageInfo{{Packages: osPkgs}} - } - // Fill applications - for _, app := range apps { - s.SBOM.Applications = append(s.SBOM.Applications, *app) - } + compB, ok := components[rel.RefB.ElementRefID] + if !ok { // Skip if child is not Package + continue + } - // Fallback for when there are no effective relationships. - if err := s.parsePackages(orphanPkgs); err != nil { - return err + s.BOM.AddRelationship(compA, compB, s.parseRelationshipType(rel.Relationship)) } return nil } -// parsePackages processes the packages and categorizes them into OS packages and application packages. -// Note that all language-specific packages are treated as a single application. -func (s *SPDX) parsePackages(pkgs map[common.ElementID]*spdx.Package) error { - var ( - osPkgs []ftypes.Package - apps = make(map[ftypes.LangType]ftypes.Application) - ) - - for _, p := range pkgs { - pkg, pkgURL, err := parsePkg(*p, nil) - if errors.Is(err, errUnknownPackageFormat) { +// parseFiles parses Relationships and finds filepaths for packages +func (s *SPDX) parseFiles(spdxDocument *spdx.Document) { + fileSPDXIdentifierMap := lo.SliceToMap(spdxDocument.Files, func(file *spdx.File) (common.ElementID, *spdx.File) { + return file.FileSPDXIdentifier, file + }) + + for _, rel := range spdxDocument.Relationships { + if rel.Relationship != common.TypeRelationshipContains && rel.Relationship != "CONTAIN" { + // Skip the DESCRIBES relationship. 
continue - } else if err != nil { - return xerrors.Errorf("failed to parse package: %w", err) } - switch pkgURL.Class() { - case types.ClassOSPkg: - osPkgs = append(osPkgs, *pkg) - case types.ClassLangPkg: - // Language-specific packages - pkgType := pkgURL.LangType() - app, ok := apps[pkgType] - if !ok { - app.Type = pkgType + + // hasFiles field is deprecated + // https://github.com/spdx/tools-golang/issues/171 + // hasFiles values converted in Relationships + // https://github.com/spdx/tools-golang/pull/201 + if isFile(rel.RefB.ElementRefID) { + file, ok := fileSPDXIdentifierMap[rel.RefB.ElementRefID] + if ok { + // Save filePaths for packages + // Insert filepath will be later + s.pkgFilePaths[rel.RefA.ElementRefID] = file.FileName } - app.Libraries = append(app.Libraries, *pkg) - apps[pkgType] = app + continue } } - if len(osPkgs) > 0 { - s.Packages = []ftypes.PackageInfo{{Packages: osPkgs}} - } - for _, app := range apps { - sort.Sort(app.Libraries) - s.SBOM.Applications = append(s.SBOM.Applications, app) - } - return nil } -func createPackageSPDXIdentifierMap(packages []*spdx.Package) map[common.ElementID]*spdx.Package { - return lo.SliceToMap(packages, func(pkg *spdx.Package) (common.ElementID, *spdx.Package) { - return pkg.PackageSPDXIdentifier, pkg - }) -} - -func createFileSPDXIdentifierMap(files []*spdx.File) map[string]*spdx.File { - ret := make(map[string]*spdx.File) - for _, file := range files { - ret[string(file.FileSPDXIdentifier)] = file +func (s *SPDX) parsePackages(spdxDocument *spdx.Document) (map[common.ElementID]*core.Component, error) { + // Find a root package + var rootID common.ElementID + for _, rel := range spdxDocument.Relationships { + if rel.RefA.ElementRefID == DocumentSPDXIdentifier && rel.Relationship == RelationShipDescribe { + rootID = rel.RefB.ElementRefID + break + } } - return ret -} - -func isOperatingSystem(elementID spdx.ElementID) bool { - return strings.HasPrefix(string(elementID), ElementOperatingSystem) -} -func isApplication(elementID spdx.ElementID) bool { - return strings.HasPrefix(string(elementID), ElementApplication) -} - -func isFile(elementID spdx.ElementID) bool { - return strings.HasPrefix(string(elementID), ElementFile) -} + // Convert packages into components + components := make(map[common.ElementID]*core.Component) + for _, pkg := range spdxDocument.Packages { + component, err := s.parsePackage(*pkg) + if err != nil { + return nil, xerrors.Errorf("failed to parse package: %w", err) + } + components[pkg.PackageSPDXIdentifier] = component -func initApplication(pkg spdx.Package) *ftypes.Application { - app := &ftypes.Application{Type: ftypes.LangType(pkg.PackageName)} - switch app.Type { - case ftypes.NodePkg, ftypes.PythonPkg, ftypes.GemSpec, ftypes.Jar, ftypes.CondaPkg: - app.FilePath = "" - default: - app.FilePath = pkg.PackageSourceInfo + if pkg.PackageSPDXIdentifier == rootID { + component.Root = true + } + s.BOM.AddComponent(component) } - - return app + return components, nil } -func parseOS(pkg spdx.Package) *ftypes.OS { - return &ftypes.OS{ - Family: ftypes.OSType(pkg.PackageName), - Name: pkg.PackageVersion, +func (s *SPDX) parsePackage(spdxPkg spdx.Package) (*core.Component, error) { + var err error + component := &core.Component{ + Type: s.parseType(spdxPkg), + Name: spdxPkg.PackageName, + Version: spdxPkg.PackageVersion, } -} -func parsePkg(spdxPkg spdx.Package, packageFilePaths map[string]string) (*ftypes.Package, *purl.PackageURL, error) { - pkgURL, err := parseExternalReferences(spdxPkg.PackageExternalReferences) - 
if err != nil { - return nil, nil, xerrors.Errorf("external references error: %w", err) + // PURL + if component.PkgID.PURL, err = s.parseExternalReferences(spdxPkg.PackageExternalReferences); err != nil { + return nil, xerrors.Errorf("external references error: %w", err) } - pkg := pkgURL.Package() + // License if spdxPkg.PackageLicenseDeclared != "NONE" { - pkg.Licenses = strings.Split(spdxPkg.PackageLicenseDeclared, ",") + component.Licenses = strings.Split(spdxPkg.PackageLicenseDeclared, ",") } + // Source package if strings.HasPrefix(spdxPkg.PackageSourceInfo, SourcePackagePrefix) { srcPkgName := strings.TrimPrefix(spdxPkg.PackageSourceInfo, fmt.Sprintf("%s: ", SourcePackagePrefix)) - pkg.SrcEpoch, pkg.SrcName, pkg.SrcVersion, pkg.SrcRelease, err = parseSourceInfo(pkgURL.Type, srcPkgName) - if err != nil { - return nil, nil, xerrors.Errorf("failed to parse source info: %w", err) - } + component.SrcName, component.SrcVersion, _ = strings.Cut(srcPkgName, " ") } - if path, ok := packageFilePaths[string(spdxPkg.PackageSPDXIdentifier)]; ok { - pkg.FilePath = path + // Files + // TODO: handle checksums as well + if path, ok := s.pkgFilePaths[spdxPkg.PackageSPDXIdentifier]; ok { + component.Files = []core.File{ + {Path: path}, + } } else if len(spdxPkg.Files) > 0 { - // Take the first file name - pkg.FilePath = spdxPkg.Files[0].FileName + component.Files = []core.File{ + {Path: spdxPkg.Files[0].FileName}, // Take the first file name + } } - pkg.ID = lookupAttributionTexts(spdxPkg.PackageAttributionTexts, PropertyPkgID) - pkg.Layer.Digest = lookupAttributionTexts(spdxPkg.PackageAttributionTexts, PropertyLayerDigest) - pkg.Layer.DiffID = lookupAttributionTexts(spdxPkg.PackageAttributionTexts, PropertyLayerDiffID) + // Attributions + for _, attr := range spdxPkg.PackageAttributionTexts { + k, v, ok := strings.Cut(attr, ": ") + if !ok { + continue + } + component.Properties = append(component.Properties, core.Property{ + Name: k, + Value: v, + }) + } + + // For backward-compatibility + // Older Trivy versions put the file path in "sourceInfo" and the package type in "name". 
+ if s.trivySBOM && component.Type == core.TypeApplication && spdxPkg.PackageSourceInfo != "" { + component.Name = spdxPkg.PackageSourceInfo + component.Properties = append(component.Properties, core.Property{ + Name: core.PropertyType, + Value: spdxPkg.PackageName, + }) + } - return pkg, pkgURL, nil + return component, nil } -func parseExternalReferences(refs []*spdx.PackageExternalReference) (*purl.PackageURL, error) { +func (s *SPDX) parseType(pkg spdx.Package) core.ComponentType { + id := string(pkg.PackageSPDXIdentifier) + switch { + case strings.HasPrefix(id, ElementOperatingSystem): + return core.TypeOS + case strings.HasPrefix(id, ElementApplication): + return core.TypeApplication + case strings.HasPrefix(id, ElementPackage): + return core.TypeLibrary + default: + return core.TypeLibrary // unknown is handled as a library + } +} + +func (s *SPDX) parseRelationshipType(rel string) core.RelationshipType { + switch rel { + case common.TypeRelationshipDescribe: + return core.RelationshipDescribes + case common.TypeRelationshipContains, "CONTAIN": + return core.RelationshipContains + case common.TypeRelationshipDependsOn: + return core.RelationshipDependsOn + default: + return core.RelationshipContains + } +} + +func (s *SPDX) parseExternalReferences(refs []*spdx.PackageExternalReference) (*packageurl.PackageURL, error) { for _, ref := range refs { // Extract the package information from PURL if ref.RefType != RefTypePurl || ref.Category != CategoryPackageManager { continue } - packageURL, err := purl.FromString(ref.Locator) + packageURL, err := packageurl.FromString(ref.Locator) if err != nil { return nil, xerrors.Errorf("failed to parse purl from string: %w", err) } - return packageURL, nil + return &packageURL, nil } - return nil, errUnknownPackageFormat + return nil, nil } -func lookupAttributionTexts(attributionTexts []string, key string) string { - for _, text := range attributionTexts { - if strings.HasPrefix(text, key) { - return strings.TrimPrefix(text, fmt.Sprintf("%s: ", key)) - } +func (s *SPDX) isTrivySBOM(spdxDocument *spdx.Document) bool { + if spdxDocument == nil || spdxDocument.CreationInfo == nil || spdxDocument.CreationInfo.Creators == nil { + return false } - return "" -} -func parseSourceInfo(pkgType, sourceInfo string) (epoch int, name, ver, rel string, err error) { - srcNameVersion := strings.TrimPrefix(sourceInfo, fmt.Sprintf("%s: ", SourcePackagePrefix)) - ss := strings.Split(srcNameVersion, " ") - if len(ss) != 2 { - return 0, "", "", "", xerrors.Errorf("invalid source info (%s)", sourceInfo) - } - name = ss[0] - if pkgType == packageurl.TypeRPM { - v := version.NewVersion(ss[1]) - epoch = v.Epoch() - ver = v.Version() - rel = v.Release() - } else { - ver = ss[1] + for _, c := range spdxDocument.CreationInfo.Creators { + if c.CreatorType == "Tool" && strings.HasPrefix(c.Creator, "trivy") { + return true + } } - return epoch, name, ver, rel, nil + return false } -// getPackageFilePaths parses Relationships and finds filepaths for packages -func getPackageFilePaths(spdxDocument *spdx.Document) map[string]string { - packageFilePaths := make(map[string]string) - fileSPDXIdentifierMap := createFileSPDXIdentifierMap(spdxDocument.Files) - for _, rel := range spdxDocument.Relationships { - if rel.Relationship != common.TypeRelationshipContains && rel.Relationship != "CONTAIN" { - // Skip the DESCRIBES relationship. 
- continue - } - - // hasFiles field is deprecated - // https://github.com/spdx/tools-golang/issues/171 - // hasFiles values converted in Relationships - // https://github.com/spdx/tools-golang/pull/201 - if isFile(rel.RefB.ElementRefID) { - file, ok := fileSPDXIdentifierMap[string(rel.RefB.ElementRefID)] - if ok { - // Save filePaths for packages - // Insert filepath will be later - packageFilePaths[string(rel.RefA.ElementRefID)] = file.FileName - } - continue - } - } - return packageFilePaths +func isFile(elementID spdx.ElementID) bool { + return strings.HasPrefix(string(elementID), ElementFile) } diff --git a/pkg/sbom/spdx/unmarshal_test.go b/pkg/sbom/spdx/unmarshal_test.go index cee50461508e..73f7d2dc934f 100644 --- a/pkg/sbom/spdx/unmarshal_test.go +++ b/pkg/sbom/spdx/unmarshal_test.go @@ -2,6 +2,7 @@ package spdx_test import ( "encoding/json" + sbomio "github.com/aquasecurity/trivy/pkg/sbom/io" "github.com/package-url/packageurl-go" "os" "sort" @@ -27,6 +28,15 @@ func TestUnmarshaler_Unmarshal(t *testing.T) { inputFile: "testdata/happy/bom.json", want: types.SBOM{ Metadata: types.Metadata{ + ImageID: "sha256:49193a2310dbad4c02382da87ac624a80a92387a4f7536235f9ba590e5bcd7b5", + DiffIDs: []string{ + "sha256:3c79e832b1b4891a1cb4a326ef8524e0bd14a2537150ac0e203a5677176c1ca1", + "sha256:dd565ff850e7003356e2b252758f9bdc1ff2803f61e995e24c7844f6297f8fc3", + }, + RepoTags: []string{ + "maven-test-project:latest", + "tmp-test:latest", + }, OS: &ftypes.OS{ Family: "alpine", Name: "3.16.0", @@ -36,6 +46,7 @@ func TestUnmarshaler_Unmarshal(t *testing.T) { { Packages: ftypes.Packages{ { + ID: "musl@1.2.3-r0", Name: "musl", Version: "1.2.3-r0", SrcName: "musl", @@ -68,6 +79,7 @@ func TestUnmarshaler_Unmarshal(t *testing.T) { FilePath: "app/composer/composer.lock", Libraries: ftypes.Packages{ { + ID: "pear/log@1.13.1", Name: "pear/log", Version: "1.13.1", Identifier: ftypes.PkgIdentifier{ @@ -83,7 +95,7 @@ func TestUnmarshaler_Unmarshal(t *testing.T) { }, }, { - + ID: "pear/pear_exception@v1.0.0", Name: "pear/pear_exception", Version: "v1.0.0", Identifier: ftypes.PkgIdentifier{ @@ -105,6 +117,7 @@ func TestUnmarshaler_Unmarshal(t *testing.T) { FilePath: "app/gobinary/gobinary", Libraries: ftypes.Packages{ { + ID: "github.com/package-url/packageurl-go@v0.1.1-0.20220203205134-d70459300c8a", Name: "github.com/package-url/packageurl-go", Version: "v0.1.1-0.20220203205134-d70459300c8a", Identifier: ftypes.PkgIdentifier{ @@ -125,6 +138,7 @@ func TestUnmarshaler_Unmarshal(t *testing.T) { Type: "jar", Libraries: ftypes.Packages{ { + ID: "org.codehaus.mojo:child-project:1.0", Name: "org.codehaus.mojo:child-project", Identifier: ftypes.PkgIdentifier{ PURL: &packageurl.PackageURL{ @@ -145,6 +159,7 @@ func TestUnmarshaler_Unmarshal(t *testing.T) { Type: "node-pkg", Libraries: ftypes.Packages{ { + ID: "bootstrap@5.0.2", Name: "bootstrap", Version: "5.0.2", Identifier: ftypes.PkgIdentifier{ @@ -170,7 +185,7 @@ func TestUnmarshaler_Unmarshal(t *testing.T) { want: types.SBOM{ Applications: []ftypes.Application{ { - Type: "node-pkg", + Type: ftypes.NodePkg, Libraries: ftypes.Packages{ { ID: "yargs-parser@21.1.1", @@ -228,6 +243,7 @@ func TestUnmarshaler_Unmarshal(t *testing.T) { FilePath: "app/composer/composer.lock", Libraries: ftypes.Packages{ { + ID: "pear/log@1.13.1", Name: "pear/log", Version: "1.13.1", Identifier: ftypes.PkgIdentifier{ @@ -240,7 +256,7 @@ func TestUnmarshaler_Unmarshal(t *testing.T) { }, }, { - + ID: "pear/pear_exception@v1.0.0", Name: "pear/pear_exception", Version: "v1.0.0", Identifier: 
ftypes.PkgIdentifier{ @@ -266,9 +282,10 @@ func TestUnmarshaler_Unmarshal(t *testing.T) { Type: ftypes.Jar, Libraries: ftypes.Packages{ { - FilePath: "modules/apm/elastic-apm-agent-1.36.0.jar", + ID: "co.elastic.apm:apm-agent:1.36.0", Name: "co.elastic.apm:apm-agent", Version: "1.36.0", + FilePath: "modules/apm/elastic-apm-agent-1.36.0.jar", Identifier: ftypes.PkgIdentifier{ PURL: &packageurl.PackageURL{ Type: packageurl.TypeMaven, @@ -279,9 +296,10 @@ func TestUnmarshaler_Unmarshal(t *testing.T) { }, }, { - FilePath: "modules/apm/elastic-apm-agent-1.36.0.jar", + ID: "co.elastic.apm:apm-agent-cached-lookup-key:1.36.0", Name: "co.elastic.apm:apm-agent-cached-lookup-key", Version: "1.36.0", + FilePath: "modules/apm/elastic-apm-agent-1.36.0.jar", Identifier: ftypes.PkgIdentifier{ PURL: &packageurl.PackageURL{ Type: packageurl.TypeMaven, @@ -296,6 +314,11 @@ func TestUnmarshaler_Unmarshal(t *testing.T) { }, }, }, + { + name: "happy path with file as parent of relationship", + inputFile: "testdata/happy/with-file-as-relationship-parent.json", + want: types.SBOM{}, + }, { name: "happy path only os component", inputFile: "testdata/happy/os-only-bom.json", @@ -315,8 +338,8 @@ func TestUnmarshaler_Unmarshal(t *testing.T) { }, { name: "sad path invalid purl", - inputFile: "testdata/sad/invalid-source-info.json", - wantErr: "failed to parse source info:", + inputFile: "testdata/sad/invalid-purl.json", + wantErr: "purl is missing type or name", }, } @@ -326,22 +349,24 @@ func TestUnmarshaler_Unmarshal(t *testing.T) { require.NoError(t, err) defer f.Close() - v := &spdx.SPDX{SBOM: &types.SBOM{}} - err = json.NewDecoder(f).Decode(v) + var v spdx.SPDX + err = json.NewDecoder(f).Decode(&v) if tt.wantErr != "" { - require.Error(t, err) - assert.Contains(t, err.Error(), tt.wantErr) + assert.ErrorContains(t, err, tt.wantErr) return } - // Not compare the SPDX field - v.BOM = nil + var got types.SBOM + err = sbomio.NewDecoder(v.BOM).Decode(&got) + require.NoError(t, err) + + // Not compare BOM + got.BOM = nil - sort.Slice(v.Applications, func(i, j int) bool { - return v.Applications[i].Type < v.Applications[j].Type + sort.Slice(got.Applications, func(i, j int) bool { + return got.Applications[i].Type < got.Applications[j].Type }) - require.NoError(t, err) - assert.Equal(t, tt.want, *v.SBOM) + assert.Equal(t, tt.want, got) }) } } diff --git a/pkg/vex/csaf.go b/pkg/vex/csaf.go index 33c5c8975b3e..d5d68f76adb9 100644 --- a/pkg/vex/csaf.go +++ b/pkg/vex/csaf.go @@ -8,6 +8,7 @@ import ( "github.com/aquasecurity/trivy/pkg/log" "github.com/aquasecurity/trivy/pkg/purl" + "github.com/aquasecurity/trivy/pkg/sbom/core" "github.com/aquasecurity/trivy/pkg/types" ) @@ -23,7 +24,7 @@ func newCSAF(advisory csaf.Advisory) VEX { } } -func (v *CSAF) Filter(result *types.Result) { +func (v *CSAF) Filter(result *types.Result, _ *core.BOM) { result.Vulnerabilities = lo.Filter(result.Vulnerabilities, func(vuln types.DetectedVulnerability, _ int) bool { found, ok := lo.Find(v.advisory.Vulnerabilities, func(item *csaf.Vulnerability) bool { return string(*item.CVE) == vuln.VulnerabilityID diff --git a/pkg/vex/cyclonedx.go b/pkg/vex/cyclonedx.go index a956703da3ae..685fefebf304 100644 --- a/pkg/vex/cyclonedx.go +++ b/pkg/vex/cyclonedx.go @@ -45,7 +45,7 @@ func newCycloneDX(sbom *core.BOM, vex *cdx.BOM) *CycloneDX { } } -func (v *CycloneDX) Filter(result *types.Result) { +func (v *CycloneDX) Filter(result *types.Result, _ *core.BOM) { result.Vulnerabilities = lo.Filter(result.Vulnerabilities, func(vuln types.DetectedVulnerability, _ 
int) bool { stmt, ok := lo.Find(v.statements, func(item Statement) bool { return item.VulnerabilityID == vuln.VulnerabilityID diff --git a/pkg/vex/openvex.go b/pkg/vex/openvex.go index 24e2bb6cca9c..a6cae6de7ac8 100644 --- a/pkg/vex/openvex.go +++ b/pkg/vex/openvex.go @@ -4,6 +4,7 @@ import ( openvex "github.com/openvex/go-vex/pkg/vex" "github.com/samber/lo" + "github.com/aquasecurity/trivy/pkg/sbom/core" "github.com/aquasecurity/trivy/pkg/types" ) @@ -17,13 +18,13 @@ func newOpenVEX(vex openvex.VEX) VEX { } } -func (v *OpenVEX) Filter(result *types.Result) { +func (v *OpenVEX) Filter(result *types.Result, bom *core.BOM) { result.Vulnerabilities = lo.Filter(result.Vulnerabilities, func(vuln types.DetectedVulnerability, _ int) bool { if vuln.PkgIdentifier.PURL == nil { return true } - stmts := v.vex.Matches(vuln.VulnerabilityID, vuln.PkgIdentifier.PURL.String(), nil) + stmts := v.Matches(vuln, bom) if len(stmts) == 0 { return true } @@ -41,6 +42,17 @@ func (v *OpenVEX) Filter(result *types.Result) { }) } +func (v *OpenVEX) Matches(vuln types.DetectedVulnerability, bom *core.BOM) []openvex.Statement { + root := bom.Root() + if root != nil && root.PkgID.PURL != nil { + stmts := v.vex.Matches(vuln.VulnerabilityID, root.PkgID.PURL.String(), []string{vuln.PkgIdentifier.PURL.String()}) + if len(stmts) != 0 { + return stmts + } + } + return v.vex.Matches(vuln.VulnerabilityID, vuln.PkgIdentifier.PURL.String(), nil) +} + func findingStatus(status openvex.Status) types.FindingStatus { switch status { case openvex.StatusNotAffected: diff --git a/pkg/vex/testdata/openvex-oci.json b/pkg/vex/testdata/openvex-oci.json new file mode 100644 index 000000000000..667ca5e3d049 --- /dev/null +++ b/pkg/vex/testdata/openvex-oci.json @@ -0,0 +1,26 @@ +{ + "@context": "https://openvex.dev/ns/v0.2.0", + "author": "Aqua Security", + "role": "Project Release Bot", + "timestamp": "2023-01-16T19:07:16.853479631-06:00", + "version": 1, + "statements": [ + { + "vulnerability": { + "name": "CVE-2022-3715" + }, + "products": [ + { + "@id": "pkg:oci/debian", + "subcomponents": [ + { + "@id": "pkg:deb/debian/bash" + } + ] + } + ], + "status": "not_affected", + "justification": "vulnerable_code_not_in_execute_path" + } + ] +} diff --git a/pkg/vex/vex.go b/pkg/vex/vex.go index dc2c118b56bc..0e47bf03bf52 100644 --- a/pkg/vex/vex.go +++ b/pkg/vex/vex.go @@ -13,6 +13,7 @@ import ( ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/sbom" + "github.com/aquasecurity/trivy/pkg/sbom/core" "github.com/aquasecurity/trivy/pkg/sbom/cyclonedx" "github.com/aquasecurity/trivy/pkg/types" ) @@ -21,7 +22,7 @@ import ( // Note: This is in the experimental stage and does not yet support many specifications. // The implementation may change significantly. 
type VEX interface { - Filter(*types.Result) + Filter(*types.Result, *core.BOM) } func New(filePath string, report types.Report) (VEX, error) { diff --git a/pkg/vex/vex_test.go b/pkg/vex/vex_test.go index d591ccfdc6c6..77d2aff3c63e 100644 --- a/pkg/vex/vex_test.go +++ b/pkg/vex/vex_test.go @@ -16,6 +16,48 @@ import ( "github.com/aquasecurity/trivy/pkg/vex" ) +var ( + vuln1 = types.DetectedVulnerability{ + VulnerabilityID: "CVE-2021-44228", + PkgName: "spring-boot", + InstalledVersion: "2.6.0", + PkgIdentifier: ftypes.PkgIdentifier{ + PURL: &packageurl.PackageURL{ + Type: packageurl.TypeMaven, + Namespace: "org.springframework.boot", + Name: "spring-boot", + Version: "2.6.0", + }, + }, + } + vuln2 = types.DetectedVulnerability{ + VulnerabilityID: "CVE-2021-0001", + PkgName: "spring-boot", + InstalledVersion: "2.6.0", + PkgIdentifier: ftypes.PkgIdentifier{ + PURL: &packageurl.PackageURL{ + Type: packageurl.TypeMaven, + Namespace: "org.springframework.boot", + Name: "spring-boot", + Version: "2.6.0", + }, + }, + } + vuln3 = types.DetectedVulnerability{ + VulnerabilityID: "CVE-2022-3715", + PkgName: "bash", + InstalledVersion: "5.2.15", + PkgIdentifier: ftypes.PkgIdentifier{ + PURL: &packageurl.PackageURL{ + Type: packageurl.TypeDebian, + Namespace: "debian", + Name: "bash", + Version: "5.2.15", + }, + }, + } +) + func TestMain(m *testing.M) { log.InitLogger(false, true) os.Exit(m.Run()) @@ -28,6 +70,7 @@ func TestVEX_Filter(t *testing.T) { } type args struct { vulns []types.DetectedVulnerability + bom *core.BOM } tests := []struct { name string @@ -42,21 +85,8 @@ func TestVEX_Filter(t *testing.T) { filePath: "testdata/openvex.json", }, args: args{ - vulns: []types.DetectedVulnerability{ - { - VulnerabilityID: "CVE-2021-44228", - PkgName: "spring-boot", - InstalledVersion: "2.6.0", - PkgIdentifier: ftypes.PkgIdentifier{ - PURL: &packageurl.PackageURL{ - Type: packageurl.TypeMaven, - Namespace: "org.springframework.boot", - Name: "spring-boot", - Version: "2.6.0", - }, - }, - }, - }, + vulns: []types.DetectedVulnerability{vuln1}, + bom: newTestBOM(), }, want: []types.DetectedVulnerability{}, }, @@ -67,49 +97,38 @@ func TestVEX_Filter(t *testing.T) { }, args: args{ vulns: []types.DetectedVulnerability{ - { - VulnerabilityID: "CVE-2021-44228", - PkgName: "spring-boot", - InstalledVersion: "2.6.0", - PkgIdentifier: ftypes.PkgIdentifier{ - PURL: &packageurl.PackageURL{ - Type: packageurl.TypeMaven, - Namespace: "org.springframework.boot", - Name: "spring-boot", - Version: "2.6.0", - }, - }, - }, - { - VulnerabilityID: "CVE-2021-0001", - PkgName: "spring-boot", - InstalledVersion: "2.6.0", - PkgIdentifier: ftypes.PkgIdentifier{ - PURL: &packageurl.PackageURL{ - Type: packageurl.TypeMaven, - Namespace: "org.springframework.boot", - Name: "spring-boot", - Version: "2.6.0", - }, - }, - }, + vuln1, // filtered by VEX + vuln2, }, + bom: newTestBOM(), }, want: []types.DetectedVulnerability{ - { - VulnerabilityID: "CVE-2021-0001", - PkgName: "spring-boot", - InstalledVersion: "2.6.0", - PkgIdentifier: ftypes.PkgIdentifier{ - PURL: &packageurl.PackageURL{ - Type: packageurl.TypeMaven, - Namespace: "org.springframework.boot", - Name: "spring-boot", - Version: "2.6.0", - }, - }, + vuln2, + }, + }, + { + name: "OpenVEX, subcomponents, oci image", + fields: fields{ + filePath: "testdata/openvex-oci.json", + }, + args: args{ + vulns: []types.DetectedVulnerability{ + vuln3, }, + bom: newTestBOM(), + }, + want: []types.DetectedVulnerability{}, + }, + { + name: "OpenVEX, subcomponents, wrong oci image", + fields: 
fields{ + filePath: "testdata/openvex-oci.json", + }, + args: args{ + vulns: []types.DetectedVulnerability{vuln3}, + bom: newTestBOM2(), }, + want: []types.DetectedVulnerability{vuln3}, }, { name: "CycloneDX SBOM with CycloneDX VEX", @@ -347,8 +366,62 @@ func TestVEX_Filter(t *testing.T) { got := &types.Result{ Vulnerabilities: tt.args.vulns, } - v.Filter(got) + v.Filter(got, tt.args.bom) assert.Equal(t, tt.want, got.Vulnerabilities) }) } } + +func newTestBOM() *core.BOM { + bom := core.NewBOM(core.Options{}) + bom.AddComponent(&core.Component{ + Root: true, + Type: core.TypeContainerImage, + Name: "debian:12", + PkgID: core.PkgID{ + PURL: &packageurl.PackageURL{ + Type: packageurl.TypeOCI, + Name: "debian", + Version: "sha256:4482958b4461ff7d9fabc24b3a9ab1e9a2c85ece07b2db1840c7cbc01d053e90", + Qualifiers: packageurl.Qualifiers{ + { + Key: "tag", + Value: "12", + }, + { + Key: "repository_url", + Value: "docker.io/library/debian", + }, + }, + }, + }, + }) + return bom +} + +func newTestBOM2() *core.BOM { + bom := core.NewBOM(core.Options{}) + bom.AddComponent(&core.Component{ + Root: true, + Type: core.TypeContainerImage, + Name: "ubuntu:24.04", + PkgID: core.PkgID{ + PURL: &packageurl.PackageURL{ + Type: packageurl.TypeOCI, + Name: "ubuntu", + Version: "sha256:4482958b4461ff7d9fabc24b3a9ab1e9a2c85ece07b2db1840c7cbc01d053e90", + Qualifiers: packageurl.Qualifiers{ + { + Key: "tag", + Value: "24.04", + }, + { + Key: "repository_url", + Value: "docker.io/library/ubuntu", + }, + }, + }, + }, + }) + return bom +} diff --git a/rpc/common/service.pb.go b/rpc/common/service.pb.go index 33e8f40c7c2f..0174ab2c7f5f 100644 --- a/rpc/common/service.pb.go +++ b/rpc/common/service.pb.go @@ -138,7 +138,7 @@ func (x LicenseCategory_Enum) Number() protoreflect.EnumNumber { // Deprecated: Use LicenseCategory_Enum.Descriptor instead. func (LicenseCategory_Enum) EnumDescriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{23, 0} + return file_rpc_common_service_proto_rawDescGZIP(), []int{24, 0} } type LicenseType_Enum int32 @@ -190,7 +190,7 @@ func (x LicenseType_Enum) Number() protoreflect.EnumNumber { // Deprecated: Use LicenseType_Enum.Descriptor instead. func (LicenseType_Enum) EnumDescriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{24, 0} + return file_rpc_common_service_proto_rawDescGZIP(), []int{25, 0} } type OS struct { @@ -453,17 +453,18 @@ type Package struct { Arch string `protobuf:"bytes,5,opt,name=arch,proto3" json:"arch,omitempty"` // src package containing some binary packages // e.g. 
bind - SrcName string `protobuf:"bytes,6,opt,name=src_name,json=srcName,proto3" json:"src_name,omitempty"` - SrcVersion string `protobuf:"bytes,7,opt,name=src_version,json=srcVersion,proto3" json:"src_version,omitempty"` - SrcRelease string `protobuf:"bytes,8,opt,name=src_release,json=srcRelease,proto3" json:"src_release,omitempty"` - SrcEpoch int32 `protobuf:"varint,9,opt,name=src_epoch,json=srcEpoch,proto3" json:"src_epoch,omitempty"` - Licenses []string `protobuf:"bytes,15,rep,name=licenses,proto3" json:"licenses,omitempty"` - Layer *Layer `protobuf:"bytes,11,opt,name=layer,proto3" json:"layer,omitempty"` - FilePath string `protobuf:"bytes,12,opt,name=file_path,json=filePath,proto3" json:"file_path,omitempty"` - DependsOn []string `protobuf:"bytes,14,rep,name=depends_on,json=dependsOn,proto3" json:"depends_on,omitempty"` - Digest string `protobuf:"bytes,16,opt,name=digest,proto3" json:"digest,omitempty"` - Dev bool `protobuf:"varint,17,opt,name=dev,proto3" json:"dev,omitempty"` - Indirect bool `protobuf:"varint,18,opt,name=indirect,proto3" json:"indirect,omitempty"` + SrcName string `protobuf:"bytes,6,opt,name=src_name,json=srcName,proto3" json:"src_name,omitempty"` + SrcVersion string `protobuf:"bytes,7,opt,name=src_version,json=srcVersion,proto3" json:"src_version,omitempty"` + SrcRelease string `protobuf:"bytes,8,opt,name=src_release,json=srcRelease,proto3" json:"src_release,omitempty"` + SrcEpoch int32 `protobuf:"varint,9,opt,name=src_epoch,json=srcEpoch,proto3" json:"src_epoch,omitempty"` + Licenses []string `protobuf:"bytes,15,rep,name=licenses,proto3" json:"licenses,omitempty"` + Locations []*Location `protobuf:"bytes,20,rep,name=locations,proto3" json:"locations,omitempty"` + Layer *Layer `protobuf:"bytes,11,opt,name=layer,proto3" json:"layer,omitempty"` + FilePath string `protobuf:"bytes,12,opt,name=file_path,json=filePath,proto3" json:"file_path,omitempty"` + DependsOn []string `protobuf:"bytes,14,rep,name=depends_on,json=dependsOn,proto3" json:"depends_on,omitempty"` + Digest string `protobuf:"bytes,16,opt,name=digest,proto3" json:"digest,omitempty"` + Dev bool `protobuf:"varint,17,opt,name=dev,proto3" json:"dev,omitempty"` + Indirect bool `protobuf:"varint,18,opt,name=indirect,proto3" json:"indirect,omitempty"` } func (x *Package) Reset() { @@ -582,6 +583,13 @@ func (x *Package) GetLicenses() []string { return nil } +func (x *Package) GetLocations() []*Location { + if x != nil { + return x.Locations + } + return nil +} + func (x *Package) GetLayer() *Layer { if x != nil { return x.Layer @@ -679,6 +687,61 @@ func (x *PkgIdentifier) GetBomRef() string { return "" } +type Location struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + StartLine int32 `protobuf:"varint,1,opt,name=start_line,json=startLine,proto3" json:"start_line,omitempty"` + EndLine int32 `protobuf:"varint,2,opt,name=end_line,json=endLine,proto3" json:"end_line,omitempty"` +} + +func (x *Location) Reset() { + *x = Location{} + if protoimpl.UnsafeEnabled { + mi := &file_rpc_common_service_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Location) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Location) ProtoMessage() {} + +func (x *Location) ProtoReflect() protoreflect.Message { + mi := &file_rpc_common_service_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == 
nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Location.ProtoReflect.Descriptor instead. +func (*Location) Descriptor() ([]byte, []int) { + return file_rpc_common_service_proto_rawDescGZIP(), []int{6} +} + +func (x *Location) GetStartLine() int32 { + if x != nil { + return x.StartLine + } + return 0 +} + +func (x *Location) GetEndLine() int32 { + if x != nil { + return x.EndLine + } + return 0 +} + type Misconfiguration struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -695,7 +758,7 @@ type Misconfiguration struct { func (x *Misconfiguration) Reset() { *x = Misconfiguration{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[6] + mi := &file_rpc_common_service_proto_msgTypes[7] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -708,7 +771,7 @@ func (x *Misconfiguration) String() string { func (*Misconfiguration) ProtoMessage() {} func (x *Misconfiguration) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[6] + mi := &file_rpc_common_service_proto_msgTypes[7] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -721,7 +784,7 @@ func (x *Misconfiguration) ProtoReflect() protoreflect.Message { // Deprecated: Use Misconfiguration.ProtoReflect.Descriptor instead. func (*Misconfiguration) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{6} + return file_rpc_common_service_proto_rawDescGZIP(), []int{7} } func (x *Misconfiguration) GetFileType() string { @@ -780,7 +843,7 @@ type MisconfResult struct { func (x *MisconfResult) Reset() { *x = MisconfResult{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[7] + mi := &file_rpc_common_service_proto_msgTypes[8] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -793,7 +856,7 @@ func (x *MisconfResult) String() string { func (*MisconfResult) ProtoMessage() {} func (x *MisconfResult) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[7] + mi := &file_rpc_common_service_proto_msgTypes[8] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -806,7 +869,7 @@ func (x *MisconfResult) ProtoReflect() protoreflect.Message { // Deprecated: Use MisconfResult.ProtoReflect.Descriptor instead. 
func (*MisconfResult) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{7} + return file_rpc_common_service_proto_rawDescGZIP(), []int{8} } func (x *MisconfResult) GetNamespace() string { @@ -855,7 +918,7 @@ type PolicyMetadata struct { func (x *PolicyMetadata) Reset() { *x = PolicyMetadata{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[8] + mi := &file_rpc_common_service_proto_msgTypes[9] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -868,7 +931,7 @@ func (x *PolicyMetadata) String() string { func (*PolicyMetadata) ProtoMessage() {} func (x *PolicyMetadata) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[8] + mi := &file_rpc_common_service_proto_msgTypes[9] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -881,7 +944,7 @@ func (x *PolicyMetadata) ProtoReflect() protoreflect.Message { // Deprecated: Use PolicyMetadata.ProtoReflect.Descriptor instead. func (*PolicyMetadata) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{8} + return file_rpc_common_service_proto_rawDescGZIP(), []int{9} } func (x *PolicyMetadata) GetId() string { @@ -965,7 +1028,7 @@ type DetectedMisconfiguration struct { func (x *DetectedMisconfiguration) Reset() { *x = DetectedMisconfiguration{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[9] + mi := &file_rpc_common_service_proto_msgTypes[10] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -978,7 +1041,7 @@ func (x *DetectedMisconfiguration) String() string { func (*DetectedMisconfiguration) ProtoMessage() {} func (x *DetectedMisconfiguration) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[9] + mi := &file_rpc_common_service_proto_msgTypes[10] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -991,7 +1054,7 @@ func (x *DetectedMisconfiguration) ProtoReflect() protoreflect.Message { // Deprecated: Use DetectedMisconfiguration.ProtoReflect.Descriptor instead. func (*DetectedMisconfiguration) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{9} + return file_rpc_common_service_proto_rawDescGZIP(), []int{10} } func (x *DetectedMisconfiguration) GetType() string { @@ -1133,7 +1196,7 @@ type Vulnerability struct { func (x *Vulnerability) Reset() { *x = Vulnerability{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[10] + mi := &file_rpc_common_service_proto_msgTypes[11] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1146,7 +1209,7 @@ func (x *Vulnerability) String() string { func (*Vulnerability) ProtoMessage() {} func (x *Vulnerability) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[10] + mi := &file_rpc_common_service_proto_msgTypes[11] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1159,7 +1222,7 @@ func (x *Vulnerability) ProtoReflect() protoreflect.Message { // Deprecated: Use Vulnerability.ProtoReflect.Descriptor instead. 
func (*Vulnerability) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{10} + return file_rpc_common_service_proto_rawDescGZIP(), []int{11} } func (x *Vulnerability) GetVulnerabilityId() string { @@ -1343,7 +1406,7 @@ type DataSource struct { func (x *DataSource) Reset() { *x = DataSource{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[11] + mi := &file_rpc_common_service_proto_msgTypes[12] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1356,7 +1419,7 @@ func (x *DataSource) String() string { func (*DataSource) ProtoMessage() {} func (x *DataSource) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[11] + mi := &file_rpc_common_service_proto_msgTypes[12] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1369,7 +1432,7 @@ func (x *DataSource) ProtoReflect() protoreflect.Message { // Deprecated: Use DataSource.ProtoReflect.Descriptor instead. func (*DataSource) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{11} + return file_rpc_common_service_proto_rawDescGZIP(), []int{12} } func (x *DataSource) GetId() string { @@ -1406,7 +1469,7 @@ type Layer struct { func (x *Layer) Reset() { *x = Layer{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[12] + mi := &file_rpc_common_service_proto_msgTypes[13] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1419,7 +1482,7 @@ func (x *Layer) String() string { func (*Layer) ProtoMessage() {} func (x *Layer) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[12] + mi := &file_rpc_common_service_proto_msgTypes[13] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1432,7 +1495,7 @@ func (x *Layer) ProtoReflect() protoreflect.Message { // Deprecated: Use Layer.ProtoReflect.Descriptor instead. func (*Layer) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{12} + return file_rpc_common_service_proto_rawDescGZIP(), []int{13} } func (x *Layer) GetDigest() string { @@ -1472,7 +1535,7 @@ type CauseMetadata struct { func (x *CauseMetadata) Reset() { *x = CauseMetadata{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[13] + mi := &file_rpc_common_service_proto_msgTypes[14] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1485,7 +1548,7 @@ func (x *CauseMetadata) String() string { func (*CauseMetadata) ProtoMessage() {} func (x *CauseMetadata) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[13] + mi := &file_rpc_common_service_proto_msgTypes[14] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1498,7 +1561,7 @@ func (x *CauseMetadata) ProtoReflect() protoreflect.Message { // Deprecated: Use CauseMetadata.ProtoReflect.Descriptor instead. 
func (*CauseMetadata) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{13} + return file_rpc_common_service_proto_rawDescGZIP(), []int{14} } func (x *CauseMetadata) GetResource() string { @@ -1557,7 +1620,7 @@ type CVSS struct { func (x *CVSS) Reset() { *x = CVSS{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[14] + mi := &file_rpc_common_service_proto_msgTypes[15] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1570,7 +1633,7 @@ func (x *CVSS) String() string { func (*CVSS) ProtoMessage() {} func (x *CVSS) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[14] + mi := &file_rpc_common_service_proto_msgTypes[15] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1583,7 +1646,7 @@ func (x *CVSS) ProtoReflect() protoreflect.Message { // Deprecated: Use CVSS.ProtoReflect.Descriptor instead. func (*CVSS) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{14} + return file_rpc_common_service_proto_rawDescGZIP(), []int{15} } func (x *CVSS) GetV2Vector() string { @@ -1628,7 +1691,7 @@ type CustomResource struct { func (x *CustomResource) Reset() { *x = CustomResource{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[15] + mi := &file_rpc_common_service_proto_msgTypes[16] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1641,7 +1704,7 @@ func (x *CustomResource) String() string { func (*CustomResource) ProtoMessage() {} func (x *CustomResource) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[15] + mi := &file_rpc_common_service_proto_msgTypes[16] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1654,7 +1717,7 @@ func (x *CustomResource) ProtoReflect() protoreflect.Message { // Deprecated: Use CustomResource.ProtoReflect.Descriptor instead. func (*CustomResource) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{15} + return file_rpc_common_service_proto_rawDescGZIP(), []int{16} } func (x *CustomResource) GetType() string { @@ -1703,7 +1766,7 @@ type Line struct { func (x *Line) Reset() { *x = Line{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[16] + mi := &file_rpc_common_service_proto_msgTypes[17] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1716,7 +1779,7 @@ func (x *Line) String() string { func (*Line) ProtoMessage() {} func (x *Line) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[16] + mi := &file_rpc_common_service_proto_msgTypes[17] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1729,7 +1792,7 @@ func (x *Line) ProtoReflect() protoreflect.Message { // Deprecated: Use Line.ProtoReflect.Descriptor instead. 
func (*Line) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{16} + return file_rpc_common_service_proto_rawDescGZIP(), []int{17} } func (x *Line) GetNumber() int32 { @@ -1799,7 +1862,7 @@ type Code struct { func (x *Code) Reset() { *x = Code{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[17] + mi := &file_rpc_common_service_proto_msgTypes[18] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1812,7 +1875,7 @@ func (x *Code) String() string { func (*Code) ProtoMessage() {} func (x *Code) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[17] + mi := &file_rpc_common_service_proto_msgTypes[18] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1825,7 +1888,7 @@ func (x *Code) ProtoReflect() protoreflect.Message { // Deprecated: Use Code.ProtoReflect.Descriptor instead. func (*Code) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{17} + return file_rpc_common_service_proto_rawDescGZIP(), []int{18} } func (x *Code) GetLines() []*Line { @@ -1854,7 +1917,7 @@ type SecretFinding struct { func (x *SecretFinding) Reset() { *x = SecretFinding{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[18] + mi := &file_rpc_common_service_proto_msgTypes[19] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1867,7 +1930,7 @@ func (x *SecretFinding) String() string { func (*SecretFinding) ProtoMessage() {} func (x *SecretFinding) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[18] + mi := &file_rpc_common_service_proto_msgTypes[19] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1880,7 +1943,7 @@ func (x *SecretFinding) ProtoReflect() protoreflect.Message { // Deprecated: Use SecretFinding.ProtoReflect.Descriptor instead. func (*SecretFinding) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{18} + return file_rpc_common_service_proto_rawDescGZIP(), []int{19} } func (x *SecretFinding) GetRuleId() string { @@ -1958,7 +2021,7 @@ type Secret struct { func (x *Secret) Reset() { *x = Secret{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[19] + mi := &file_rpc_common_service_proto_msgTypes[20] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1971,7 +2034,7 @@ func (x *Secret) String() string { func (*Secret) ProtoMessage() {} func (x *Secret) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[19] + mi := &file_rpc_common_service_proto_msgTypes[20] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1984,7 +2047,7 @@ func (x *Secret) ProtoReflect() protoreflect.Message { // Deprecated: Use Secret.ProtoReflect.Descriptor instead. 
func (*Secret) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{19} + return file_rpc_common_service_proto_rawDescGZIP(), []int{20} } func (x *Secret) GetFilepath() string { @@ -2018,7 +2081,7 @@ type DetectedLicense struct { func (x *DetectedLicense) Reset() { *x = DetectedLicense{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[20] + mi := &file_rpc_common_service_proto_msgTypes[21] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2031,7 +2094,7 @@ func (x *DetectedLicense) String() string { func (*DetectedLicense) ProtoMessage() {} func (x *DetectedLicense) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[20] + mi := &file_rpc_common_service_proto_msgTypes[21] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2044,7 +2107,7 @@ func (x *DetectedLicense) ProtoReflect() protoreflect.Message { // Deprecated: Use DetectedLicense.ProtoReflect.Descriptor instead. func (*DetectedLicense) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{20} + return file_rpc_common_service_proto_rawDescGZIP(), []int{21} } func (x *DetectedLicense) GetSeverity() Severity { @@ -2111,7 +2174,7 @@ type LicenseFile struct { func (x *LicenseFile) Reset() { *x = LicenseFile{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[21] + mi := &file_rpc_common_service_proto_msgTypes[22] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2124,7 +2187,7 @@ func (x *LicenseFile) String() string { func (*LicenseFile) ProtoMessage() {} func (x *LicenseFile) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[21] + mi := &file_rpc_common_service_proto_msgTypes[22] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2137,7 +2200,7 @@ func (x *LicenseFile) ProtoReflect() protoreflect.Message { // Deprecated: Use LicenseFile.ProtoReflect.Descriptor instead. func (*LicenseFile) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{21} + return file_rpc_common_service_proto_rawDescGZIP(), []int{22} } func (x *LicenseFile) GetLicenseType() LicenseType_Enum { @@ -2189,7 +2252,7 @@ type LicenseFinding struct { func (x *LicenseFinding) Reset() { *x = LicenseFinding{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[22] + mi := &file_rpc_common_service_proto_msgTypes[23] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2202,7 +2265,7 @@ func (x *LicenseFinding) String() string { func (*LicenseFinding) ProtoMessage() {} func (x *LicenseFinding) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[22] + mi := &file_rpc_common_service_proto_msgTypes[23] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2215,7 +2278,7 @@ func (x *LicenseFinding) ProtoReflect() protoreflect.Message { // Deprecated: Use LicenseFinding.ProtoReflect.Descriptor instead. 
func (*LicenseFinding) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{22} + return file_rpc_common_service_proto_rawDescGZIP(), []int{23} } func (x *LicenseFinding) GetCategory() LicenseCategory_Enum { @@ -2258,7 +2321,7 @@ type LicenseCategory struct { func (x *LicenseCategory) Reset() { *x = LicenseCategory{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[23] + mi := &file_rpc_common_service_proto_msgTypes[24] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2271,7 +2334,7 @@ func (x *LicenseCategory) String() string { func (*LicenseCategory) ProtoMessage() {} func (x *LicenseCategory) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[23] + mi := &file_rpc_common_service_proto_msgTypes[24] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2284,7 +2347,7 @@ func (x *LicenseCategory) ProtoReflect() protoreflect.Message { // Deprecated: Use LicenseCategory.ProtoReflect.Descriptor instead. func (*LicenseCategory) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{23} + return file_rpc_common_service_proto_rawDescGZIP(), []int{24} } type LicenseType struct { @@ -2296,7 +2359,7 @@ type LicenseType struct { func (x *LicenseType) Reset() { *x = LicenseType{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[24] + mi := &file_rpc_common_service_proto_msgTypes[25] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2309,7 +2372,7 @@ func (x *LicenseType) String() string { func (*LicenseType) ProtoMessage() {} func (x *LicenseType) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[24] + mi := &file_rpc_common_service_proto_msgTypes[25] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2322,7 +2385,7 @@ func (x *LicenseType) ProtoReflect() protoreflect.Message { // Deprecated: Use LicenseType.ProtoReflect.Descriptor instead. 
func (*LicenseType) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{24} + return file_rpc_common_service_proto_rawDescGZIP(), []int{25} } var File_rpc_common_service_proto protoreflect.FileDescriptor @@ -2357,7 +2420,7 @@ var file_rpc_common_service_proto_rawDesc = []byte{ 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x12, 0x33, 0x0a, 0x09, 0x6c, 0x69, 0x62, 0x72, 0x61, 0x72, 0x69, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x50, 0x61, 0x63, 0x6b, 0x61, - 0x67, 0x65, 0x52, 0x09, 0x6c, 0x69, 0x62, 0x72, 0x61, 0x72, 0x69, 0x65, 0x73, 0x22, 0x8b, 0x04, + 0x67, 0x65, 0x52, 0x09, 0x6c, 0x69, 0x62, 0x72, 0x61, 0x72, 0x69, 0x65, 0x73, 0x22, 0xc1, 0x04, 0x0a, 0x07, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a, @@ -2379,330 +2442,337 @@ var file_rpc_common_service_proto_rawDesc = []byte{ 0x6c, 0x65, 0x61, 0x73, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x73, 0x72, 0x63, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x09, 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, 0x73, 0x72, 0x63, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x1a, 0x0a, 0x08, 0x6c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x73, 0x18, 0x0f, - 0x20, 0x03, 0x28, 0x09, 0x52, 0x08, 0x6c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x73, 0x12, 0x29, - 0x0a, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, - 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, 0x61, 0x79, - 0x65, 0x72, 0x52, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, - 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x66, 0x69, - 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x12, 0x1d, 0x0a, 0x0a, 0x64, 0x65, 0x70, 0x65, 0x6e, 0x64, - 0x73, 0x5f, 0x6f, 0x6e, 0x18, 0x0e, 0x20, 0x03, 0x28, 0x09, 0x52, 0x09, 0x64, 0x65, 0x70, 0x65, - 0x6e, 0x64, 0x73, 0x4f, 0x6e, 0x12, 0x16, 0x0a, 0x06, 0x64, 0x69, 0x67, 0x65, 0x73, 0x74, 0x18, - 0x10, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x64, 0x69, 0x67, 0x65, 0x73, 0x74, 0x12, 0x10, 0x0a, - 0x03, 0x64, 0x65, 0x76, 0x18, 0x11, 0x20, 0x01, 0x28, 0x08, 0x52, 0x03, 0x64, 0x65, 0x76, 0x12, - 0x1a, 0x0a, 0x08, 0x69, 0x6e, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x18, 0x12, 0x20, 0x01, 0x28, - 0x08, 0x52, 0x08, 0x69, 0x6e, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x22, 0x3c, 0x0a, 0x0d, 0x50, - 0x6b, 0x67, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x12, 0x12, 0x0a, 0x04, - 0x70, 0x75, 0x72, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x75, 0x72, 0x6c, - 0x12, 0x17, 0x0a, 0x07, 0x62, 0x6f, 0x6d, 0x5f, 0x72, 0x65, 0x66, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x06, 0x62, 0x6f, 0x6d, 0x52, 0x65, 0x66, 0x22, 0xb6, 0x02, 0x0a, 0x10, 0x4d, 0x69, - 0x73, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1b, - 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x66, - 0x69, 0x6c, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, - 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x12, 0x39, 0x0a, 0x09, 0x73, 0x75, 0x63, 0x63, - 0x65, 0x73, 0x73, 0x65, 0x73, 0x18, 
0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x74, 0x72, - 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4d, 0x69, 0x73, 0x63, 0x6f, - 0x6e, 0x66, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x52, 0x09, 0x73, 0x75, 0x63, 0x63, 0x65, 0x73, - 0x73, 0x65, 0x73, 0x12, 0x37, 0x0a, 0x08, 0x77, 0x61, 0x72, 0x6e, 0x69, 0x6e, 0x67, 0x73, 0x18, - 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, + 0x20, 0x03, 0x28, 0x09, 0x52, 0x08, 0x6c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x73, 0x12, 0x34, + 0x0a, 0x09, 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x14, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x16, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, + 0x2e, 0x4c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x09, 0x6c, 0x6f, 0x63, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x29, 0x0a, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x18, 0x0b, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, + 0x6f, 0x6e, 0x2e, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x52, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x12, + 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x0c, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x12, 0x1d, 0x0a, 0x0a, + 0x64, 0x65, 0x70, 0x65, 0x6e, 0x64, 0x73, 0x5f, 0x6f, 0x6e, 0x18, 0x0e, 0x20, 0x03, 0x28, 0x09, + 0x52, 0x09, 0x64, 0x65, 0x70, 0x65, 0x6e, 0x64, 0x73, 0x4f, 0x6e, 0x12, 0x16, 0x0a, 0x06, 0x64, + 0x69, 0x67, 0x65, 0x73, 0x74, 0x18, 0x10, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x64, 0x69, 0x67, + 0x65, 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x64, 0x65, 0x76, 0x18, 0x11, 0x20, 0x01, 0x28, 0x08, + 0x52, 0x03, 0x64, 0x65, 0x76, 0x12, 0x1a, 0x0a, 0x08, 0x69, 0x6e, 0x64, 0x69, 0x72, 0x65, 0x63, + 0x74, 0x18, 0x12, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x69, 0x6e, 0x64, 0x69, 0x72, 0x65, 0x63, + 0x74, 0x22, 0x3c, 0x0a, 0x0d, 0x50, 0x6b, 0x67, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, + 0x65, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x75, 0x72, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x04, 0x70, 0x75, 0x72, 0x6c, 0x12, 0x17, 0x0a, 0x07, 0x62, 0x6f, 0x6d, 0x5f, 0x72, 0x65, + 0x66, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x62, 0x6f, 0x6d, 0x52, 0x65, 0x66, 0x22, + 0x44, 0x0a, 0x08, 0x4c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1d, 0x0a, 0x0a, 0x73, + 0x74, 0x61, 0x72, 0x74, 0x5f, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, + 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x4c, 0x69, 0x6e, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x65, 0x6e, + 0x64, 0x5f, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x07, 0x65, 0x6e, + 0x64, 0x4c, 0x69, 0x6e, 0x65, 0x22, 0xb6, 0x02, 0x0a, 0x10, 0x4d, 0x69, 0x73, 0x63, 0x6f, 0x6e, + 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, + 0x6c, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x66, + 0x69, 0x6c, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, + 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, + 0x50, 0x61, 0x74, 0x68, 0x12, 0x39, 0x0a, 0x09, 0x73, 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x65, + 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, + 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4d, 0x69, 0x73, 0x63, 0x6f, 0x6e, 0x66, 0x52, 0x65, + 0x73, 0x75, 0x6c, 0x74, 0x52, 0x09, 0x73, 0x75, 0x63, 0x63, 
0x65, 0x73, 0x73, 0x65, 0x73, 0x12, + 0x37, 0x0a, 0x08, 0x77, 0x61, 0x72, 0x6e, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x1b, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, + 0x2e, 0x4d, 0x69, 0x73, 0x63, 0x6f, 0x6e, 0x66, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x52, 0x08, + 0x77, 0x61, 0x72, 0x6e, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x37, 0x0a, 0x08, 0x66, 0x61, 0x69, 0x6c, + 0x75, 0x72, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x74, 0x72, 0x69, + 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4d, 0x69, 0x73, 0x63, 0x6f, 0x6e, + 0x66, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x52, 0x08, 0x66, 0x61, 0x69, 0x6c, 0x75, 0x72, 0x65, + 0x73, 0x12, 0x3b, 0x0a, 0x0a, 0x65, 0x78, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, + 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4d, 0x69, 0x73, 0x63, 0x6f, 0x6e, 0x66, 0x52, 0x65, 0x73, 0x75, - 0x6c, 0x74, 0x52, 0x08, 0x77, 0x61, 0x72, 0x6e, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x37, 0x0a, 0x08, - 0x66, 0x61, 0x69, 0x6c, 0x75, 0x72, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, - 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4d, 0x69, - 0x73, 0x63, 0x6f, 0x6e, 0x66, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x52, 0x08, 0x66, 0x61, 0x69, - 0x6c, 0x75, 0x72, 0x65, 0x73, 0x12, 0x3b, 0x0a, 0x0a, 0x65, 0x78, 0x63, 0x65, 0x70, 0x74, 0x69, - 0x6f, 0x6e, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x74, 0x72, 0x69, 0x76, - 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4d, 0x69, 0x73, 0x63, 0x6f, 0x6e, 0x66, - 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x52, 0x0a, 0x65, 0x78, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x22, 0xf3, 0x01, 0x0a, 0x0d, 0x4d, 0x69, 0x73, 0x63, 0x6f, 0x6e, 0x66, 0x52, 0x65, - 0x73, 0x75, 0x6c, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, - 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, - 0x63, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x45, 0x0a, 0x0f, - 0x70, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, - 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, - 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x4d, 0x65, 0x74, 0x61, 0x64, - 0x61, 0x74, 0x61, 0x52, 0x0e, 0x70, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x4d, 0x65, 0x74, 0x61, 0x64, - 0x61, 0x74, 0x61, 0x12, 0x42, 0x0a, 0x0e, 0x63, 0x61, 0x75, 0x73, 0x65, 0x5f, 0x6d, 0x65, 0x74, - 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x74, 0x72, - 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x43, 0x61, 0x75, 0x73, 0x65, - 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x0d, 0x63, 0x61, 0x75, 0x73, 0x65, 0x4d, - 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x4a, 0x04, 0x08, 0x03, 0x10, 0x07, 0x52, 0x04, 0x74, - 0x79, 0x70, 0x65, 0x52, 0x02, 0x69, 0x64, 0x52, 0x05, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x52, 0x08, - 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x22, 0xf0, 0x01, 0x0a, 0x0e, 0x50, 0x6f, 0x6c, - 0x69, 0x63, 0x79, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x0e, 0x0a, 0x02, 0x69, - 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x15, 0x0a, 
0x06, 0x61, - 0x64, 0x76, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x61, 0x64, 0x76, - 0x49, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x12, 0x20, 0x0a, 0x0b, - 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1a, - 0x0a, 0x08, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x08, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x2f, 0x0a, 0x13, 0x72, 0x65, - 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x65, 0x64, 0x5f, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x73, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, - 0x6e, 0x64, 0x65, 0x64, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x1e, 0x0a, 0x0a, 0x72, - 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x09, 0x52, - 0x0a, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x22, 0xf7, 0x03, 0x0a, 0x18, - 0x44, 0x65, 0x74, 0x65, 0x63, 0x74, 0x65, 0x64, 0x4d, 0x69, 0x73, 0x63, 0x6f, 0x6e, 0x66, 0x69, - 0x67, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x0e, 0x0a, 0x02, - 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x14, 0x0a, 0x05, - 0x74, 0x69, 0x74, 0x6c, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x69, 0x74, - 0x6c, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, - 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, - 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, - 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x1c, - 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x1e, 0x0a, 0x0a, - 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x0a, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x32, 0x0a, 0x08, - 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, - 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x53, 0x65, - 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x52, 0x08, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, - 0x12, 0x1f, 0x0a, 0x0b, 0x70, 0x72, 0x69, 0x6d, 0x61, 0x72, 0x79, 0x5f, 0x75, 0x72, 0x6c, 0x18, - 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x70, 0x72, 0x69, 0x6d, 0x61, 0x72, 0x79, 0x55, 0x72, - 0x6c, 0x12, 0x1e, 0x0a, 0x0a, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x18, - 0x0a, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, - 0x73, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x0b, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x29, 0x0a, 0x05, 0x6c, 0x61, 0x79, - 0x65, 0x72, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, - 0x2e, 0x63, 
0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x52, 0x05, 0x6c, - 0x61, 0x79, 0x65, 0x72, 0x12, 0x42, 0x0a, 0x0e, 0x63, 0x61, 0x75, 0x73, 0x65, 0x5f, 0x6d, 0x65, - 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x74, - 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x43, 0x61, 0x75, 0x73, - 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x0d, 0x63, 0x61, 0x75, 0x73, 0x65, - 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x15, 0x0a, 0x06, 0x61, 0x76, 0x64, 0x5f, - 0x69, 0x64, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x61, 0x76, 0x64, 0x49, 0x64, 0x12, - 0x14, 0x0a, 0x05, 0x71, 0x75, 0x65, 0x72, 0x79, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, - 0x71, 0x75, 0x65, 0x72, 0x79, 0x22, 0xff, 0x09, 0x0a, 0x0d, 0x56, 0x75, 0x6c, 0x6e, 0x65, 0x72, - 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x12, 0x29, 0x0a, 0x10, 0x76, 0x75, 0x6c, 0x6e, 0x65, - 0x72, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x0f, 0x76, 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, - 0x49, 0x64, 0x12, 0x19, 0x0a, 0x08, 0x70, 0x6b, 0x67, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x6b, 0x67, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2b, 0x0a, - 0x11, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6c, 0x6c, 0x65, 0x64, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, - 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6c, - 0x6c, 0x65, 0x64, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x23, 0x0a, 0x0d, 0x66, 0x69, - 0x78, 0x65, 0x64, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x0c, 0x66, 0x69, 0x78, 0x65, 0x64, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, - 0x14, 0x0a, 0x05, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, - 0x74, 0x69, 0x74, 0x6c, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, - 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, - 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x32, 0x0a, 0x08, 0x73, 0x65, 0x76, 0x65, 0x72, - 0x69, 0x74, 0x79, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x74, 0x72, 0x69, 0x76, - 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, - 0x79, 0x52, 0x08, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x1e, 0x0a, 0x0a, 0x72, - 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x09, 0x52, - 0x0a, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x12, 0x42, 0x0a, 0x0e, 0x70, - 0x6b, 0x67, 0x5f, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x18, 0x19, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, - 0x6f, 0x6e, 0x2e, 0x50, 0x6b, 0x67, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, - 0x52, 0x0d, 0x70, 0x6b, 0x67, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x12, - 0x29, 0x0a, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, - 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, 0x61, - 0x79, 0x65, 0x72, 0x52, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x12, 0x27, 0x0a, 0x0f, 0x73, 0x65, - 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x0b, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x0e, 0x73, 
0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x53, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x12, 0x39, 0x0a, 0x04, 0x63, 0x76, 0x73, 0x73, 0x18, 0x0c, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x25, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, - 0x2e, 0x56, 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x2e, 0x43, - 0x76, 0x73, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x04, 0x63, 0x76, 0x73, 0x73, 0x12, 0x17, - 0x0a, 0x07, 0x63, 0x77, 0x65, 0x5f, 0x69, 0x64, 0x73, 0x18, 0x0d, 0x20, 0x03, 0x28, 0x09, 0x52, - 0x06, 0x63, 0x77, 0x65, 0x49, 0x64, 0x73, 0x12, 0x1f, 0x0a, 0x0b, 0x70, 0x72, 0x69, 0x6d, 0x61, - 0x72, 0x79, 0x5f, 0x75, 0x72, 0x6c, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x70, 0x72, - 0x69, 0x6d, 0x61, 0x72, 0x79, 0x55, 0x72, 0x6c, 0x12, 0x41, 0x0a, 0x0e, 0x70, 0x75, 0x62, 0x6c, - 0x69, 0x73, 0x68, 0x65, 0x64, 0x5f, 0x64, 0x61, 0x74, 0x65, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, - 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0d, 0x70, 0x75, - 0x62, 0x6c, 0x69, 0x73, 0x68, 0x65, 0x64, 0x44, 0x61, 0x74, 0x65, 0x12, 0x48, 0x0a, 0x12, 0x6c, - 0x61, 0x73, 0x74, 0x5f, 0x6d, 0x6f, 0x64, 0x69, 0x66, 0x69, 0x65, 0x64, 0x5f, 0x64, 0x61, 0x74, - 0x65, 0x18, 0x10, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, - 0x61, 0x6d, 0x70, 0x52, 0x10, 0x6c, 0x61, 0x73, 0x74, 0x4d, 0x6f, 0x64, 0x69, 0x66, 0x69, 0x65, - 0x64, 0x44, 0x61, 0x74, 0x65, 0x12, 0x48, 0x0a, 0x14, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x5f, - 0x61, 0x64, 0x76, 0x69, 0x73, 0x6f, 0x72, 0x79, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0x11, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x12, 0x63, 0x75, 0x73, - 0x74, 0x6f, 0x6d, 0x41, 0x64, 0x76, 0x69, 0x73, 0x6f, 0x72, 0x79, 0x44, 0x61, 0x74, 0x61, 0x12, - 0x40, 0x0a, 0x10, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x5f, 0x76, 0x75, 0x6c, 0x6e, 0x5f, 0x64, - 0x61, 0x74, 0x61, 0x18, 0x12, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, - 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x56, 0x61, 0x6c, 0x75, - 0x65, 0x52, 0x0e, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x56, 0x75, 0x6c, 0x6e, 0x44, 0x61, 0x74, - 0x61, 0x12, 0x1d, 0x0a, 0x0a, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x5f, 0x69, 0x64, 0x73, 0x18, - 0x13, 0x20, 0x03, 0x28, 0x09, 0x52, 0x09, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x49, 0x64, 0x73, - 0x12, 0x39, 0x0a, 0x0b, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, - 0x14, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, - 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, - 0x0a, 0x64, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x58, 0x0a, 0x0f, 0x76, - 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x5f, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x18, 0x15, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, - 0x6d, 0x6f, 0x6e, 0x2e, 0x56, 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, - 0x79, 0x2e, 0x56, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, - 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0e, 0x76, 0x65, 0x6e, 
0x64, 0x6f, 0x72, 0x53, 0x65, 0x76, - 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x19, 0x0a, 0x08, 0x70, 0x6b, 0x67, 0x5f, 0x70, 0x61, 0x74, - 0x68, 0x18, 0x16, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x6b, 0x67, 0x50, 0x61, 0x74, 0x68, - 0x12, 0x15, 0x0a, 0x06, 0x70, 0x6b, 0x67, 0x5f, 0x69, 0x64, 0x18, 0x17, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x05, 0x70, 0x6b, 0x67, 0x49, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, - 0x73, 0x18, 0x18, 0x20, 0x01, 0x28, 0x05, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x1a, - 0x4b, 0x0a, 0x09, 0x43, 0x76, 0x73, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, - 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x28, - 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, - 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x43, 0x56, 0x53, - 0x53, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x59, 0x0a, 0x13, - 0x56, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x45, 0x6e, - 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2c, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, - 0x6d, 0x6f, 0x6e, 0x2e, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x52, 0x05, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x42, 0x0a, 0x0a, 0x44, 0x61, 0x74, 0x61, 0x53, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, 0x6c, - 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x22, 0x57, 0x0a, 0x05, 0x4c, - 0x61, 0x79, 0x65, 0x72, 0x12, 0x16, 0x0a, 0x06, 0x64, 0x69, 0x67, 0x65, 0x73, 0x74, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x64, 0x69, 0x67, 0x65, 0x73, 0x74, 0x12, 0x17, 0x0a, 0x07, - 0x64, 0x69, 0x66, 0x66, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x64, - 0x69, 0x66, 0x66, 0x49, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, - 0x5f, 0x62, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, - 0x65, 0x64, 0x42, 0x79, 0x22, 0xc3, 0x01, 0x0a, 0x0d, 0x43, 0x61, 0x75, 0x73, 0x65, 0x4d, 0x65, - 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x1a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, - 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, - 0x63, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x12, 0x18, - 0x0a, 0x07, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x07, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, - 0x74, 0x5f, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x73, 0x74, - 0x61, 0x72, 0x74, 0x4c, 0x69, 0x6e, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x65, 0x6e, 0x64, 0x5f, 0x6c, - 0x69, 0x6e, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x05, 0x52, 0x07, 0x65, 0x6e, 0x64, 0x4c, 0x69, - 0x6e, 0x65, 0x12, 0x26, 0x0a, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x06, 0x20, 0x01, 
0x28, 0x0b, - 0x32, 0x12, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, - 0x43, 0x6f, 0x64, 0x65, 0x52, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x76, 0x0a, 0x04, 0x43, 0x56, - 0x53, 0x53, 0x12, 0x1b, 0x0a, 0x09, 0x76, 0x32, 0x5f, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x76, 0x32, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, - 0x1b, 0x0a, 0x09, 0x76, 0x33, 0x5f, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x08, 0x76, 0x33, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x19, 0x0a, 0x08, - 0x76, 0x32, 0x5f, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x01, 0x52, 0x07, - 0x76, 0x32, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x76, 0x33, 0x5f, 0x73, 0x63, - 0x6f, 0x72, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x01, 0x52, 0x07, 0x76, 0x33, 0x53, 0x63, 0x6f, - 0x72, 0x65, 0x22, 0x98, 0x01, 0x0a, 0x0e, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x52, 0x65, 0x73, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, - 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x66, 0x69, - 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x12, 0x29, 0x0a, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x18, - 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, - 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x52, 0x05, 0x6c, 0x61, 0x79, 0x65, - 0x72, 0x12, 0x2a, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x6c, 0x74, 0x52, 0x0a, 0x65, 0x78, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0xf3, + 0x01, 0x0a, 0x0d, 0x4d, 0x69, 0x73, 0x63, 0x6f, 0x6e, 0x66, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, + 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x18, + 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x45, 0x0a, 0x0f, 0x70, 0x6f, 0x6c, 0x69, + 0x63, 0x79, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x07, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x1c, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, + 0x2e, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, + 0x0e, 0x70, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, + 0x42, 0x0a, 0x0e, 0x63, 0x61, 0x75, 0x73, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, + 0x61, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, + 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x43, 0x61, 0x75, 0x73, 0x65, 0x4d, 0x65, 0x74, 0x61, + 0x64, 0x61, 0x74, 0x61, 0x52, 0x0d, 0x63, 0x61, 0x75, 0x73, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, + 0x61, 0x74, 0x61, 0x4a, 0x04, 0x08, 0x03, 0x10, 0x07, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x52, + 0x02, 0x69, 0x64, 0x52, 0x05, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x52, 0x08, 0x73, 0x65, 0x76, 0x65, + 0x72, 0x69, 0x74, 0x79, 0x22, 0xf0, 0x01, 0x0a, 0x0e, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x4d, + 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x15, 0x0a, 0x06, 0x61, 0x64, 0x76, 0x5f, 0x69, + 0x64, 0x18, 
0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x61, 0x64, 0x76, 0x49, 0x64, 0x12, 0x12, + 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, + 0x70, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x05, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, + 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, + 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1a, 0x0a, 0x08, 0x73, 0x65, + 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x73, 0x65, + 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x2f, 0x0a, 0x13, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, + 0x65, 0x6e, 0x64, 0x65, 0x64, 0x5f, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x07, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x12, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x65, 0x64, + 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x1e, 0x0a, 0x0a, 0x72, 0x65, 0x66, 0x65, 0x72, + 0x65, 0x6e, 0x63, 0x65, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, 0x72, 0x65, 0x66, + 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x22, 0xf7, 0x03, 0x0a, 0x18, 0x44, 0x65, 0x74, 0x65, + 0x63, 0x74, 0x65, 0x64, 0x4d, 0x69, 0x73, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x69, 0x74, 0x6c, + 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x12, 0x20, + 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, + 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, + 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x72, 0x65, 0x73, 0x6f, + 0x6c, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x72, 0x65, + 0x73, 0x6f, 0x6c, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x32, 0x0a, 0x08, 0x73, 0x65, 0x76, 0x65, + 0x72, 0x69, 0x74, 0x79, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x74, 0x72, 0x69, + 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, + 0x74, 0x79, 0x52, 0x08, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x1f, 0x0a, 0x0b, + 0x70, 0x72, 0x69, 0x6d, 0x61, 0x72, 0x79, 0x5f, 0x75, 0x72, 0x6c, 0x18, 0x09, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x0a, 0x70, 0x72, 0x69, 0x6d, 0x61, 0x72, 0x79, 0x55, 0x72, 0x6c, 0x12, 0x1e, 0x0a, + 0x0a, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x18, 0x0a, 0x20, 0x03, 0x28, + 0x09, 0x52, 0x0a, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x12, 0x16, 0x0a, + 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, + 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x29, 0x0a, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x18, 0x0c, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, + 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, 0x61, 
0x79, 0x65, 0x72, 0x52, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, + 0x12, 0x42, 0x0a, 0x0e, 0x63, 0x61, 0x75, 0x73, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, + 0x74, 0x61, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, + 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x43, 0x61, 0x75, 0x73, 0x65, 0x4d, 0x65, 0x74, + 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x0d, 0x63, 0x61, 0x75, 0x73, 0x65, 0x4d, 0x65, 0x74, 0x61, + 0x64, 0x61, 0x74, 0x61, 0x12, 0x15, 0x0a, 0x06, 0x61, 0x76, 0x64, 0x5f, 0x69, 0x64, 0x18, 0x0e, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x61, 0x76, 0x64, 0x49, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x71, + 0x75, 0x65, 0x72, 0x79, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x71, 0x75, 0x65, 0x72, + 0x79, 0x22, 0xff, 0x09, 0x0a, 0x0d, 0x56, 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69, 0x6c, + 0x69, 0x74, 0x79, 0x12, 0x29, 0x0a, 0x10, 0x76, 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69, + 0x6c, 0x69, 0x74, 0x79, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x76, + 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x49, 0x64, 0x12, 0x19, + 0x0a, 0x08, 0x70, 0x6b, 0x67, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x07, 0x70, 0x6b, 0x67, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2b, 0x0a, 0x11, 0x69, 0x6e, 0x73, + 0x74, 0x61, 0x6c, 0x6c, 0x65, 0x64, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6c, 0x6c, 0x65, 0x64, 0x56, + 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x23, 0x0a, 0x0d, 0x66, 0x69, 0x78, 0x65, 0x64, 0x5f, + 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x66, + 0x69, 0x78, 0x65, 0x64, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x14, 0x0a, 0x05, 0x74, + 0x69, 0x74, 0x6c, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x69, 0x74, 0x6c, + 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, + 0x69, 0x6f, 0x6e, 0x12, 0x32, 0x0a, 0x08, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x18, + 0x07, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, + 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x52, 0x08, 0x73, + 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x1e, 0x0a, 0x0a, 0x72, 0x65, 0x66, 0x65, 0x72, + 0x65, 0x6e, 0x63, 0x65, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, 0x72, 0x65, 0x66, + 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x12, 0x42, 0x0a, 0x0e, 0x70, 0x6b, 0x67, 0x5f, 0x69, + 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x18, 0x19, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x1b, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x50, + 0x6b, 0x67, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x0d, 0x70, 0x6b, + 0x67, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x12, 0x29, 0x0a, 0x05, 0x6c, + 0x61, 0x79, 0x65, 0x72, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x72, 0x69, + 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x52, + 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x12, 0x27, 0x0a, 0x0f, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, + 0x74, 0x79, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x0e, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x53, 
0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, + 0x39, 0x0a, 0x04, 0x63, 0x76, 0x73, 0x73, 0x18, 0x0c, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x25, 0x2e, + 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x56, 0x75, 0x6c, + 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x2e, 0x43, 0x76, 0x73, 0x73, 0x45, + 0x6e, 0x74, 0x72, 0x79, 0x52, 0x04, 0x63, 0x76, 0x73, 0x73, 0x12, 0x17, 0x0a, 0x07, 0x63, 0x77, + 0x65, 0x5f, 0x69, 0x64, 0x73, 0x18, 0x0d, 0x20, 0x03, 0x28, 0x09, 0x52, 0x06, 0x63, 0x77, 0x65, + 0x49, 0x64, 0x73, 0x12, 0x1f, 0x0a, 0x0b, 0x70, 0x72, 0x69, 0x6d, 0x61, 0x72, 0x79, 0x5f, 0x75, + 0x72, 0x6c, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x70, 0x72, 0x69, 0x6d, 0x61, 0x72, + 0x79, 0x55, 0x72, 0x6c, 0x12, 0x41, 0x0a, 0x0e, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x65, + 0x64, 0x5f, 0x64, 0x61, 0x74, 0x65, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, + 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, + 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0d, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x73, + 0x68, 0x65, 0x64, 0x44, 0x61, 0x74, 0x65, 0x12, 0x48, 0x0a, 0x12, 0x6c, 0x61, 0x73, 0x74, 0x5f, + 0x6d, 0x6f, 0x64, 0x69, 0x66, 0x69, 0x65, 0x64, 0x5f, 0x64, 0x61, 0x74, 0x65, 0x18, 0x10, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, + 0x10, 0x6c, 0x61, 0x73, 0x74, 0x4d, 0x6f, 0x64, 0x69, 0x66, 0x69, 0x65, 0x64, 0x44, 0x61, 0x74, + 0x65, 0x12, 0x48, 0x0a, 0x14, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x5f, 0x61, 0x64, 0x76, 0x69, + 0x73, 0x6f, 0x72, 0x79, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0x11, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, - 0x66, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0xf3, 0x01, - 0x0a, 0x04, 0x4c, 0x69, 0x6e, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x06, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x12, 0x18, - 0x0a, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x12, 0x19, 0x0a, 0x08, 0x69, 0x73, 0x5f, 0x63, - 0x61, 0x75, 0x73, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x69, 0x73, 0x43, 0x61, - 0x75, 0x73, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x12, 0x1c, 0x0a, 0x09, 0x74, 0x72, 0x75, 0x6e, 0x63, 0x61, 0x74, 0x65, 0x64, - 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x74, 0x72, 0x75, 0x6e, 0x63, 0x61, 0x74, 0x65, - 0x64, 0x12, 0x20, 0x0a, 0x0b, 0x68, 0x69, 0x67, 0x68, 0x6c, 0x69, 0x67, 0x68, 0x74, 0x65, 0x64, - 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x68, 0x69, 0x67, 0x68, 0x6c, 0x69, 0x67, 0x68, - 0x74, 0x65, 0x64, 0x12, 0x1f, 0x0a, 0x0b, 0x66, 0x69, 0x72, 0x73, 0x74, 0x5f, 0x63, 0x61, 0x75, - 0x73, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x66, 0x69, 0x72, 0x73, 0x74, 0x43, - 0x61, 0x75, 0x73, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x63, 0x61, 0x75, - 0x73, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x6c, 0x61, 0x73, 0x74, 0x43, 0x61, - 0x75, 0x73, 0x65, 0x22, 0x30, 0x0a, 0x04, 0x43, 0x6f, 0x64, 0x65, 0x12, 0x28, 0x0a, 
0x05, 0x6c, - 0x69, 0x6e, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x74, 0x72, 0x69, - 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x6e, 0x65, 0x52, 0x05, - 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x22, 0x9f, 0x02, 0x0a, 0x0d, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, - 0x46, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x12, 0x17, 0x0a, 0x07, 0x72, 0x75, 0x6c, 0x65, 0x5f, - 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x72, 0x75, 0x6c, 0x65, 0x49, 0x64, - 0x12, 0x1a, 0x0a, 0x08, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x08, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x12, 0x1a, 0x0a, 0x08, - 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, - 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x69, 0x74, 0x6c, - 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x12, 0x1d, - 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x05, 0x20, 0x01, - 0x28, 0x05, 0x52, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x4c, 0x69, 0x6e, 0x65, 0x12, 0x19, 0x0a, - 0x08, 0x65, 0x6e, 0x64, 0x5f, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x05, 0x52, - 0x07, 0x65, 0x6e, 0x64, 0x4c, 0x69, 0x6e, 0x65, 0x12, 0x26, 0x0a, 0x04, 0x63, 0x6f, 0x64, 0x65, - 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, - 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x43, 0x6f, 0x64, 0x65, 0x52, 0x04, 0x63, 0x6f, 0x64, 0x65, - 0x12, 0x14, 0x0a, 0x05, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x05, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x12, 0x29, 0x0a, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x18, - 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, - 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x52, 0x05, 0x6c, 0x61, 0x79, 0x65, - 0x72, 0x4a, 0x04, 0x08, 0x09, 0x10, 0x0a, 0x22, 0x5d, 0x0a, 0x06, 0x53, 0x65, 0x63, 0x72, 0x65, - 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x70, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x70, 0x61, 0x74, 0x68, 0x12, 0x37, 0x0a, - 0x08, 0x66, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x1b, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x53, - 0x65, 0x63, 0x72, 0x65, 0x74, 0x46, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x52, 0x08, 0x66, 0x69, - 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x73, 0x22, 0x85, 0x02, 0x0a, 0x0f, 0x44, 0x65, 0x74, 0x65, 0x63, - 0x74, 0x65, 0x64, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x12, 0x32, 0x0a, 0x08, 0x73, 0x65, - 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x74, - 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x53, 0x65, 0x76, 0x65, - 0x72, 0x69, 0x74, 0x79, 0x52, 0x08, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x3e, - 0x0a, 0x08, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, - 0x32, 0x22, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, - 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x43, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x2e, - 0x45, 0x6e, 0x75, 0x6d, 0x52, 0x08, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x12, 0x19, - 0x0a, 0x08, 0x70, 0x6b, 0x67, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x07, 
0x70, 0x6b, 0x67, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, - 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x66, 0x69, - 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x05, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x63, 0x6f, - 0x6e, 0x66, 0x69, 0x64, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x02, 0x52, 0x0a, - 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x64, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6c, 0x69, - 0x6e, 0x6b, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6c, 0x69, 0x6e, 0x6b, 0x22, 0xed, - 0x01, 0x0a, 0x0b, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x12, 0x41, - 0x0a, 0x0c, 0x6c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1e, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, - 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x54, 0x79, 0x70, 0x65, 0x2e, - 0x45, 0x6e, 0x75, 0x6d, 0x52, 0x0b, 0x6c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x54, 0x79, 0x70, - 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x12, 0x19, - 0x0a, 0x08, 0x70, 0x6b, 0x67, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x07, 0x70, 0x6b, 0x67, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x38, 0x0a, 0x08, 0x66, 0x69, 0x6e, - 0x67, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x74, 0x72, - 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x63, 0x65, 0x6e, - 0x73, 0x65, 0x46, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x52, 0x08, 0x66, 0x69, 0x6e, 0x67, 0x69, - 0x6e, 0x67, 0x73, 0x12, 0x29, 0x0a, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x18, 0x05, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, - 0x6e, 0x2e, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x52, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x22, 0x98, - 0x01, 0x0a, 0x0e, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x46, 0x69, 0x6e, 0x64, 0x69, 0x6e, - 0x67, 0x12, 0x3e, 0x0a, 0x08, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x0e, 0x32, 0x22, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, - 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x43, 0x61, 0x74, 0x65, 0x67, 0x6f, - 0x72, 0x79, 0x2e, 0x45, 0x6e, 0x75, 0x6d, 0x52, 0x08, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, - 0x79, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x64, 0x65, - 0x6e, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x02, 0x52, 0x0a, 0x63, 0x6f, 0x6e, 0x66, 0x69, - 0x64, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6c, 0x69, 0x6e, 0x6b, 0x18, 0x04, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x04, 0x6c, 0x69, 0x6e, 0x6b, 0x22, 0x95, 0x01, 0x0a, 0x0f, 0x4c, 0x69, - 0x63, 0x65, 0x6e, 0x73, 0x65, 0x43, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x22, 0x81, 0x01, - 0x0a, 0x04, 0x45, 0x6e, 0x75, 0x6d, 0x12, 0x0f, 0x0a, 0x0b, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, - 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0d, 0x0a, 0x09, 0x46, 0x4f, 0x52, 0x42, 0x49, - 0x44, 0x44, 0x45, 0x4e, 0x10, 0x01, 0x12, 0x0e, 0x0a, 0x0a, 0x52, 0x45, 0x53, 0x54, 0x52, 0x49, - 0x43, 0x54, 0x45, 0x44, 0x10, 0x02, 
0x12, 0x0e, 0x0a, 0x0a, 0x52, 0x45, 0x43, 0x49, 0x50, 0x52, - 0x4f, 0x43, 0x41, 0x4c, 0x10, 0x03, 0x12, 0x0a, 0x0a, 0x06, 0x4e, 0x4f, 0x54, 0x49, 0x43, 0x45, - 0x10, 0x04, 0x12, 0x0e, 0x0a, 0x0a, 0x50, 0x45, 0x52, 0x4d, 0x49, 0x53, 0x53, 0x49, 0x56, 0x45, - 0x10, 0x05, 0x12, 0x10, 0x0a, 0x0c, 0x55, 0x4e, 0x45, 0x4e, 0x43, 0x55, 0x4d, 0x42, 0x45, 0x52, - 0x45, 0x44, 0x10, 0x06, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, - 0x07, 0x22, 0x4e, 0x0a, 0x0b, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x54, 0x79, 0x70, 0x65, - 0x22, 0x3f, 0x0a, 0x04, 0x45, 0x6e, 0x75, 0x6d, 0x12, 0x0f, 0x0a, 0x0b, 0x55, 0x4e, 0x53, 0x50, - 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x08, 0x0a, 0x04, 0x44, 0x50, 0x4b, - 0x47, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x48, 0x45, 0x41, 0x44, 0x45, 0x52, 0x10, 0x02, 0x12, - 0x10, 0x0a, 0x0c, 0x4c, 0x49, 0x43, 0x45, 0x4e, 0x53, 0x45, 0x5f, 0x46, 0x49, 0x4c, 0x45, 0x10, - 0x03, 0x2a, 0x44, 0x0a, 0x08, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x0b, 0x0a, - 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x07, 0x0a, 0x03, 0x4c, 0x4f, - 0x57, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x4d, 0x45, 0x44, 0x49, 0x55, 0x4d, 0x10, 0x02, 0x12, - 0x08, 0x0a, 0x04, 0x48, 0x49, 0x47, 0x48, 0x10, 0x03, 0x12, 0x0c, 0x0a, 0x08, 0x43, 0x52, 0x49, - 0x54, 0x49, 0x43, 0x41, 0x4c, 0x10, 0x04, 0x42, 0x31, 0x5a, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, - 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x61, 0x71, 0x75, 0x61, 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, - 0x74, 0x79, 0x2f, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2f, 0x72, 0x70, 0x63, 0x2f, 0x63, 0x6f, 0x6d, - 0x6d, 0x6f, 0x6e, 0x3b, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x33, + 0x66, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x12, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x41, + 0x64, 0x76, 0x69, 0x73, 0x6f, 0x72, 0x79, 0x44, 0x61, 0x74, 0x61, 0x12, 0x40, 0x0a, 0x10, 0x63, + 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x5f, 0x76, 0x75, 0x6c, 0x6e, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, + 0x12, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0e, 0x63, + 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x56, 0x75, 0x6c, 0x6e, 0x44, 0x61, 0x74, 0x61, 0x12, 0x1d, 0x0a, + 0x0a, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x5f, 0x69, 0x64, 0x73, 0x18, 0x13, 0x20, 0x03, 0x28, + 0x09, 0x52, 0x09, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x49, 0x64, 0x73, 0x12, 0x39, 0x0a, 0x0b, + 0x64, 0x61, 0x74, 0x61, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x14, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x18, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, + 0x2e, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x0a, 0x64, 0x61, 0x74, + 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x58, 0x0a, 0x0f, 0x76, 0x65, 0x6e, 0x64, 0x6f, + 0x72, 0x5f, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x18, 0x15, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x2f, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, + 0x56, 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x2e, 0x56, 0x65, + 0x6e, 0x64, 0x6f, 0x72, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x45, 0x6e, 0x74, 0x72, + 0x79, 0x52, 0x0e, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, + 0x79, 0x12, 0x19, 0x0a, 0x08, 0x70, 0x6b, 0x67, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x16, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x6b, 0x67, 
0x50, 0x61, 0x74, 0x68, 0x12, 0x15, 0x0a, 0x06, + 0x70, 0x6b, 0x67, 0x5f, 0x69, 0x64, 0x18, 0x17, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x70, 0x6b, + 0x67, 0x49, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x18, 0x20, + 0x01, 0x28, 0x05, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x1a, 0x4b, 0x0a, 0x09, 0x43, + 0x76, 0x73, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x28, 0x0a, 0x05, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x74, 0x72, 0x69, 0x76, + 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x43, 0x56, 0x53, 0x53, 0x52, 0x05, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x59, 0x0a, 0x13, 0x56, 0x65, 0x6e, 0x64, + 0x6f, 0x72, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, + 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, + 0x79, 0x12, 0x2c, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, + 0x32, 0x16, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, + 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, + 0x02, 0x38, 0x01, 0x22, 0x42, 0x0a, 0x0a, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, + 0x64, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, 0x6c, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x22, 0x57, 0x0a, 0x05, 0x4c, 0x61, 0x79, 0x65, 0x72, + 0x12, 0x16, 0x0a, 0x06, 0x64, 0x69, 0x67, 0x65, 0x73, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x06, 0x64, 0x69, 0x67, 0x65, 0x73, 0x74, 0x12, 0x17, 0x0a, 0x07, 0x64, 0x69, 0x66, 0x66, + 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x64, 0x69, 0x66, 0x66, 0x49, + 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x62, 0x79, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x42, 0x79, + 0x22, 0xc3, 0x01, 0x0a, 0x0d, 0x43, 0x61, 0x75, 0x73, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, + 0x74, 0x61, 0x12, 0x1a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x1a, + 0x0a, 0x08, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x08, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x65, + 0x72, 0x76, 0x69, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x65, 0x72, + 0x76, 0x69, 0x63, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x6c, 0x69, + 0x6e, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x4c, + 0x69, 0x6e, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x65, 0x6e, 0x64, 0x5f, 0x6c, 0x69, 0x6e, 0x65, 0x18, + 0x05, 0x20, 0x01, 0x28, 0x05, 0x52, 0x07, 0x65, 0x6e, 0x64, 0x4c, 0x69, 0x6e, 0x65, 0x12, 0x26, + 0x0a, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x74, + 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x43, 0x6f, 0x64, 0x65, + 0x52, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x76, 0x0a, 0x04, 0x43, 0x56, 
0x53, 0x53, 0x12, 0x1b, + 0x0a, 0x09, 0x76, 0x32, 0x5f, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x08, 0x76, 0x32, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x1b, 0x0a, 0x09, 0x76, + 0x33, 0x5f, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, + 0x76, 0x33, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x19, 0x0a, 0x08, 0x76, 0x32, 0x5f, 0x73, + 0x63, 0x6f, 0x72, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x01, 0x52, 0x07, 0x76, 0x32, 0x53, 0x63, + 0x6f, 0x72, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x76, 0x33, 0x5f, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x18, + 0x04, 0x20, 0x01, 0x28, 0x01, 0x52, 0x07, 0x76, 0x33, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x22, 0x98, + 0x01, 0x0a, 0x0e, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x70, 0x61, + 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, + 0x74, 0x68, 0x12, 0x29, 0x0a, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, + 0x2e, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x52, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x12, 0x2a, 0x0a, + 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x56, 0x61, + 0x6c, 0x75, 0x65, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0xf3, 0x01, 0x0a, 0x04, 0x4c, 0x69, + 0x6e, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x05, 0x52, 0x06, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6f, 0x6e, + 0x74, 0x65, 0x6e, 0x74, 0x12, 0x19, 0x0a, 0x08, 0x69, 0x73, 0x5f, 0x63, 0x61, 0x75, 0x73, 0x65, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x69, 0x73, 0x43, 0x61, 0x75, 0x73, 0x65, 0x12, + 0x1e, 0x0a, 0x0a, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x0a, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, + 0x1c, 0x0a, 0x09, 0x74, 0x72, 0x75, 0x6e, 0x63, 0x61, 0x74, 0x65, 0x64, 0x18, 0x05, 0x20, 0x01, + 0x28, 0x08, 0x52, 0x09, 0x74, 0x72, 0x75, 0x6e, 0x63, 0x61, 0x74, 0x65, 0x64, 0x12, 0x20, 0x0a, + 0x0b, 0x68, 0x69, 0x67, 0x68, 0x6c, 0x69, 0x67, 0x68, 0x74, 0x65, 0x64, 0x18, 0x06, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x0b, 0x68, 0x69, 0x67, 0x68, 0x6c, 0x69, 0x67, 0x68, 0x74, 0x65, 0x64, 0x12, + 0x1f, 0x0a, 0x0b, 0x66, 0x69, 0x72, 0x73, 0x74, 0x5f, 0x63, 0x61, 0x75, 0x73, 0x65, 0x18, 0x07, + 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x66, 0x69, 0x72, 0x73, 0x74, 0x43, 0x61, 0x75, 0x73, 0x65, + 0x12, 0x1d, 0x0a, 0x0a, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x63, 0x61, 0x75, 0x73, 0x65, 0x18, 0x08, + 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x6c, 0x61, 0x73, 0x74, 0x43, 0x61, 0x75, 0x73, 0x65, 0x22, + 0x30, 0x0a, 0x04, 0x43, 0x6f, 0x64, 0x65, 0x12, 0x28, 0x0a, 0x05, 0x6c, 0x69, 0x6e, 0x65, 0x73, + 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, + 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x6e, 0x65, 0x52, 0x05, 0x6c, 0x69, 0x6e, 0x65, + 0x73, 0x22, 0x9f, 0x02, 0x0a, 0x0d, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, 0x46, 0x69, 0x6e, 0x64, + 
0x69, 0x6e, 0x67, 0x12, 0x17, 0x0a, 0x07, 0x72, 0x75, 0x6c, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x72, 0x75, 0x6c, 0x65, 0x49, 0x64, 0x12, 0x1a, 0x0a, 0x08, + 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, + 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x12, 0x1a, 0x0a, 0x08, 0x73, 0x65, 0x76, 0x65, + 0x72, 0x69, 0x74, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x73, 0x65, 0x76, 0x65, + 0x72, 0x69, 0x74, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x18, 0x04, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x74, + 0x61, 0x72, 0x74, 0x5f, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, + 0x73, 0x74, 0x61, 0x72, 0x74, 0x4c, 0x69, 0x6e, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x65, 0x6e, 0x64, + 0x5f, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x05, 0x52, 0x07, 0x65, 0x6e, 0x64, + 0x4c, 0x69, 0x6e, 0x65, 0x12, 0x26, 0x0a, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x07, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, + 0x6e, 0x2e, 0x43, 0x6f, 0x64, 0x65, 0x52, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x12, 0x14, 0x0a, 0x05, + 0x6d, 0x61, 0x74, 0x63, 0x68, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6d, 0x61, 0x74, + 0x63, 0x68, 0x12, 0x29, 0x0a, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x18, 0x0a, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, + 0x2e, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x52, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x4a, 0x04, 0x08, + 0x09, 0x10, 0x0a, 0x22, 0x5d, 0x0a, 0x06, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, 0x12, 0x1a, 0x0a, + 0x08, 0x66, 0x69, 0x6c, 0x65, 0x70, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x08, 0x66, 0x69, 0x6c, 0x65, 0x70, 0x61, 0x74, 0x68, 0x12, 0x37, 0x0a, 0x08, 0x66, 0x69, 0x6e, + 0x64, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x74, 0x72, + 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x53, 0x65, 0x63, 0x72, 0x65, + 0x74, 0x46, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x52, 0x08, 0x66, 0x69, 0x6e, 0x64, 0x69, 0x6e, + 0x67, 0x73, 0x22, 0x85, 0x02, 0x0a, 0x0f, 0x44, 0x65, 0x74, 0x65, 0x63, 0x74, 0x65, 0x64, 0x4c, + 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x12, 0x32, 0x0a, 0x08, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, + 0x74, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, + 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, + 0x52, 0x08, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x3e, 0x0a, 0x08, 0x63, 0x61, + 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x22, 0x2e, 0x74, + 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x63, 0x65, + 0x6e, 0x73, 0x65, 0x43, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x2e, 0x45, 0x6e, 0x75, 0x6d, + 0x52, 0x08, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x12, 0x19, 0x0a, 0x08, 0x70, 0x6b, + 0x67, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x6b, + 0x67, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x70, 0x61, + 0x74, 0x68, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, + 0x74, 0x68, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x04, 0x6e, 0x61, 
0x6d, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x64, + 0x65, 0x6e, 0x63, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x02, 0x52, 0x0a, 0x63, 0x6f, 0x6e, 0x66, + 0x69, 0x64, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6c, 0x69, 0x6e, 0x6b, 0x18, 0x07, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6c, 0x69, 0x6e, 0x6b, 0x22, 0xed, 0x01, 0x0a, 0x0b, 0x4c, + 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x12, 0x41, 0x0a, 0x0c, 0x6c, 0x69, + 0x63, 0x65, 0x6e, 0x73, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, + 0x32, 0x1e, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, + 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x45, 0x6e, 0x75, 0x6d, + 0x52, 0x0b, 0x6c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1b, 0x0a, + 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x12, 0x19, 0x0a, 0x08, 0x70, 0x6b, + 0x67, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x6b, + 0x67, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x38, 0x0a, 0x08, 0x66, 0x69, 0x6e, 0x67, 0x69, 0x6e, 0x67, + 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, + 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x46, 0x69, + 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x52, 0x08, 0x66, 0x69, 0x6e, 0x67, 0x69, 0x6e, 0x67, 0x73, 0x12, + 0x29, 0x0a, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, + 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, 0x61, + 0x79, 0x65, 0x72, 0x52, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x22, 0x98, 0x01, 0x0a, 0x0e, 0x4c, + 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x46, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x12, 0x3e, 0x0a, + 0x08, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, + 0x22, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, + 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x43, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x2e, 0x45, + 0x6e, 0x75, 0x6d, 0x52, 0x08, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x12, 0x12, 0x0a, + 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, + 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x64, 0x65, 0x6e, 0x63, 0x65, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x02, 0x52, 0x0a, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x64, 0x65, 0x6e, 0x63, + 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6c, 0x69, 0x6e, 0x6b, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x6c, 0x69, 0x6e, 0x6b, 0x22, 0x95, 0x01, 0x0a, 0x0f, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, + 0x65, 0x43, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x22, 0x81, 0x01, 0x0a, 0x04, 0x45, 0x6e, + 0x75, 0x6d, 0x12, 0x0f, 0x0a, 0x0b, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, + 0x44, 0x10, 0x00, 0x12, 0x0d, 0x0a, 0x09, 0x46, 0x4f, 0x52, 0x42, 0x49, 0x44, 0x44, 0x45, 0x4e, + 0x10, 0x01, 0x12, 0x0e, 0x0a, 0x0a, 0x52, 0x45, 0x53, 0x54, 0x52, 0x49, 0x43, 0x54, 0x45, 0x44, + 0x10, 0x02, 0x12, 0x0e, 0x0a, 0x0a, 0x52, 0x45, 0x43, 0x49, 0x50, 0x52, 0x4f, 0x43, 0x41, 0x4c, + 0x10, 0x03, 0x12, 0x0a, 0x0a, 0x06, 0x4e, 0x4f, 0x54, 0x49, 0x43, 0x45, 0x10, 0x04, 0x12, 0x0e, + 0x0a, 0x0a, 0x50, 0x45, 0x52, 0x4d, 0x49, 0x53, 0x53, 0x49, 0x56, 0x45, 0x10, 0x05, 0x12, 0x10, + 0x0a, 0x0c, 0x55, 0x4e, 0x45, 0x4e, 0x43, 0x55, 
0x4d, 0x42, 0x45, 0x52, 0x45, 0x44, 0x10, 0x06, + 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x07, 0x22, 0x4e, 0x0a, + 0x0b, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x54, 0x79, 0x70, 0x65, 0x22, 0x3f, 0x0a, 0x04, + 0x45, 0x6e, 0x75, 0x6d, 0x12, 0x0f, 0x0a, 0x0b, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, + 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x08, 0x0a, 0x04, 0x44, 0x50, 0x4b, 0x47, 0x10, 0x01, 0x12, + 0x0a, 0x0a, 0x06, 0x48, 0x45, 0x41, 0x44, 0x45, 0x52, 0x10, 0x02, 0x12, 0x10, 0x0a, 0x0c, 0x4c, + 0x49, 0x43, 0x45, 0x4e, 0x53, 0x45, 0x5f, 0x46, 0x49, 0x4c, 0x45, 0x10, 0x03, 0x2a, 0x44, 0x0a, + 0x08, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, + 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x07, 0x0a, 0x03, 0x4c, 0x4f, 0x57, 0x10, 0x01, 0x12, + 0x0a, 0x0a, 0x06, 0x4d, 0x45, 0x44, 0x49, 0x55, 0x4d, 0x10, 0x02, 0x12, 0x08, 0x0a, 0x04, 0x48, + 0x49, 0x47, 0x48, 0x10, 0x03, 0x12, 0x0c, 0x0a, 0x08, 0x43, 0x52, 0x49, 0x54, 0x49, 0x43, 0x41, + 0x4c, 0x10, 0x04, 0x42, 0x31, 0x5a, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, + 0x6d, 0x2f, 0x61, 0x71, 0x75, 0x61, 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x2f, 0x74, + 0x72, 0x69, 0x76, 0x79, 0x2f, 0x72, 0x70, 0x63, 0x2f, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x3b, + 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -2718,7 +2788,7 @@ func file_rpc_common_service_proto_rawDescGZIP() []byte { } var file_rpc_common_service_proto_enumTypes = make([]protoimpl.EnumInfo, 3) -var file_rpc_common_service_proto_msgTypes = make([]protoimpl.MessageInfo, 27) +var file_rpc_common_service_proto_msgTypes = make([]protoimpl.MessageInfo, 28) var file_rpc_common_service_proto_goTypes = []interface{}{ (Severity)(0), // 0: trivy.common.Severity (LicenseCategory_Enum)(0), // 1: trivy.common.LicenseCategory.Enum @@ -2729,74 +2799,76 @@ var file_rpc_common_service_proto_goTypes = []interface{}{ (*Application)(nil), // 6: trivy.common.Application (*Package)(nil), // 7: trivy.common.Package (*PkgIdentifier)(nil), // 8: trivy.common.PkgIdentifier - (*Misconfiguration)(nil), // 9: trivy.common.Misconfiguration - (*MisconfResult)(nil), // 10: trivy.common.MisconfResult - (*PolicyMetadata)(nil), // 11: trivy.common.PolicyMetadata - (*DetectedMisconfiguration)(nil), // 12: trivy.common.DetectedMisconfiguration - (*Vulnerability)(nil), // 13: trivy.common.Vulnerability - (*DataSource)(nil), // 14: trivy.common.DataSource - (*Layer)(nil), // 15: trivy.common.Layer - (*CauseMetadata)(nil), // 16: trivy.common.CauseMetadata - (*CVSS)(nil), // 17: trivy.common.CVSS - (*CustomResource)(nil), // 18: trivy.common.CustomResource - (*Line)(nil), // 19: trivy.common.Line - (*Code)(nil), // 20: trivy.common.Code - (*SecretFinding)(nil), // 21: trivy.common.SecretFinding - (*Secret)(nil), // 22: trivy.common.Secret - (*DetectedLicense)(nil), // 23: trivy.common.DetectedLicense - (*LicenseFile)(nil), // 24: trivy.common.LicenseFile - (*LicenseFinding)(nil), // 25: trivy.common.LicenseFinding - (*LicenseCategory)(nil), // 26: trivy.common.LicenseCategory - (*LicenseType)(nil), // 27: trivy.common.LicenseType - nil, // 28: trivy.common.Vulnerability.CvssEntry - nil, // 29: trivy.common.Vulnerability.VendorSeverityEntry - (*timestamppb.Timestamp)(nil), // 30: google.protobuf.Timestamp - (*structpb.Value)(nil), // 31: google.protobuf.Value + (*Location)(nil), // 9: trivy.common.Location + (*Misconfiguration)(nil), // 10: trivy.common.Misconfiguration + 
(*MisconfResult)(nil), // 11: trivy.common.MisconfResult + (*PolicyMetadata)(nil), // 12: trivy.common.PolicyMetadata + (*DetectedMisconfiguration)(nil), // 13: trivy.common.DetectedMisconfiguration + (*Vulnerability)(nil), // 14: trivy.common.Vulnerability + (*DataSource)(nil), // 15: trivy.common.DataSource + (*Layer)(nil), // 16: trivy.common.Layer + (*CauseMetadata)(nil), // 17: trivy.common.CauseMetadata + (*CVSS)(nil), // 18: trivy.common.CVSS + (*CustomResource)(nil), // 19: trivy.common.CustomResource + (*Line)(nil), // 20: trivy.common.Line + (*Code)(nil), // 21: trivy.common.Code + (*SecretFinding)(nil), // 22: trivy.common.SecretFinding + (*Secret)(nil), // 23: trivy.common.Secret + (*DetectedLicense)(nil), // 24: trivy.common.DetectedLicense + (*LicenseFile)(nil), // 25: trivy.common.LicenseFile + (*LicenseFinding)(nil), // 26: trivy.common.LicenseFinding + (*LicenseCategory)(nil), // 27: trivy.common.LicenseCategory + (*LicenseType)(nil), // 28: trivy.common.LicenseType + nil, // 29: trivy.common.Vulnerability.CvssEntry + nil, // 30: trivy.common.Vulnerability.VendorSeverityEntry + (*timestamppb.Timestamp)(nil), // 31: google.protobuf.Timestamp + (*structpb.Value)(nil), // 32: google.protobuf.Value } var file_rpc_common_service_proto_depIdxs = []int32{ 7, // 0: trivy.common.PackageInfo.packages:type_name -> trivy.common.Package 7, // 1: trivy.common.Application.libraries:type_name -> trivy.common.Package 8, // 2: trivy.common.Package.identifier:type_name -> trivy.common.PkgIdentifier - 15, // 3: trivy.common.Package.layer:type_name -> trivy.common.Layer - 10, // 4: trivy.common.Misconfiguration.successes:type_name -> trivy.common.MisconfResult - 10, // 5: trivy.common.Misconfiguration.warnings:type_name -> trivy.common.MisconfResult - 10, // 6: trivy.common.Misconfiguration.failures:type_name -> trivy.common.MisconfResult - 10, // 7: trivy.common.Misconfiguration.exceptions:type_name -> trivy.common.MisconfResult - 11, // 8: trivy.common.MisconfResult.policy_metadata:type_name -> trivy.common.PolicyMetadata - 16, // 9: trivy.common.MisconfResult.cause_metadata:type_name -> trivy.common.CauseMetadata - 0, // 10: trivy.common.DetectedMisconfiguration.severity:type_name -> trivy.common.Severity - 15, // 11: trivy.common.DetectedMisconfiguration.layer:type_name -> trivy.common.Layer - 16, // 12: trivy.common.DetectedMisconfiguration.cause_metadata:type_name -> trivy.common.CauseMetadata - 0, // 13: trivy.common.Vulnerability.severity:type_name -> trivy.common.Severity - 8, // 14: trivy.common.Vulnerability.pkg_identifier:type_name -> trivy.common.PkgIdentifier - 15, // 15: trivy.common.Vulnerability.layer:type_name -> trivy.common.Layer - 28, // 16: trivy.common.Vulnerability.cvss:type_name -> trivy.common.Vulnerability.CvssEntry - 30, // 17: trivy.common.Vulnerability.published_date:type_name -> google.protobuf.Timestamp - 30, // 18: trivy.common.Vulnerability.last_modified_date:type_name -> google.protobuf.Timestamp - 31, // 19: trivy.common.Vulnerability.custom_advisory_data:type_name -> google.protobuf.Value - 31, // 20: trivy.common.Vulnerability.custom_vuln_data:type_name -> google.protobuf.Value - 14, // 21: trivy.common.Vulnerability.data_source:type_name -> trivy.common.DataSource - 29, // 22: trivy.common.Vulnerability.vendor_severity:type_name -> trivy.common.Vulnerability.VendorSeverityEntry - 20, // 23: trivy.common.CauseMetadata.code:type_name -> trivy.common.Code - 15, // 24: trivy.common.CustomResource.layer:type_name -> trivy.common.Layer - 31, // 25: 
trivy.common.CustomResource.data:type_name -> google.protobuf.Value - 19, // 26: trivy.common.Code.lines:type_name -> trivy.common.Line - 20, // 27: trivy.common.SecretFinding.code:type_name -> trivy.common.Code - 15, // 28: trivy.common.SecretFinding.layer:type_name -> trivy.common.Layer - 21, // 29: trivy.common.Secret.findings:type_name -> trivy.common.SecretFinding - 0, // 30: trivy.common.DetectedLicense.severity:type_name -> trivy.common.Severity - 1, // 31: trivy.common.DetectedLicense.category:type_name -> trivy.common.LicenseCategory.Enum - 2, // 32: trivy.common.LicenseFile.license_type:type_name -> trivy.common.LicenseType.Enum - 25, // 33: trivy.common.LicenseFile.fingings:type_name -> trivy.common.LicenseFinding - 15, // 34: trivy.common.LicenseFile.layer:type_name -> trivy.common.Layer - 1, // 35: trivy.common.LicenseFinding.category:type_name -> trivy.common.LicenseCategory.Enum - 17, // 36: trivy.common.Vulnerability.CvssEntry.value:type_name -> trivy.common.CVSS - 0, // 37: trivy.common.Vulnerability.VendorSeverityEntry.value:type_name -> trivy.common.Severity - 38, // [38:38] is the sub-list for method output_type - 38, // [38:38] is the sub-list for method input_type - 38, // [38:38] is the sub-list for extension type_name - 38, // [38:38] is the sub-list for extension extendee - 0, // [0:38] is the sub-list for field type_name + 9, // 3: trivy.common.Package.locations:type_name -> trivy.common.Location + 16, // 4: trivy.common.Package.layer:type_name -> trivy.common.Layer + 11, // 5: trivy.common.Misconfiguration.successes:type_name -> trivy.common.MisconfResult + 11, // 6: trivy.common.Misconfiguration.warnings:type_name -> trivy.common.MisconfResult + 11, // 7: trivy.common.Misconfiguration.failures:type_name -> trivy.common.MisconfResult + 11, // 8: trivy.common.Misconfiguration.exceptions:type_name -> trivy.common.MisconfResult + 12, // 9: trivy.common.MisconfResult.policy_metadata:type_name -> trivy.common.PolicyMetadata + 17, // 10: trivy.common.MisconfResult.cause_metadata:type_name -> trivy.common.CauseMetadata + 0, // 11: trivy.common.DetectedMisconfiguration.severity:type_name -> trivy.common.Severity + 16, // 12: trivy.common.DetectedMisconfiguration.layer:type_name -> trivy.common.Layer + 17, // 13: trivy.common.DetectedMisconfiguration.cause_metadata:type_name -> trivy.common.CauseMetadata + 0, // 14: trivy.common.Vulnerability.severity:type_name -> trivy.common.Severity + 8, // 15: trivy.common.Vulnerability.pkg_identifier:type_name -> trivy.common.PkgIdentifier + 16, // 16: trivy.common.Vulnerability.layer:type_name -> trivy.common.Layer + 29, // 17: trivy.common.Vulnerability.cvss:type_name -> trivy.common.Vulnerability.CvssEntry + 31, // 18: trivy.common.Vulnerability.published_date:type_name -> google.protobuf.Timestamp + 31, // 19: trivy.common.Vulnerability.last_modified_date:type_name -> google.protobuf.Timestamp + 32, // 20: trivy.common.Vulnerability.custom_advisory_data:type_name -> google.protobuf.Value + 32, // 21: trivy.common.Vulnerability.custom_vuln_data:type_name -> google.protobuf.Value + 15, // 22: trivy.common.Vulnerability.data_source:type_name -> trivy.common.DataSource + 30, // 23: trivy.common.Vulnerability.vendor_severity:type_name -> trivy.common.Vulnerability.VendorSeverityEntry + 21, // 24: trivy.common.CauseMetadata.code:type_name -> trivy.common.Code + 16, // 25: trivy.common.CustomResource.layer:type_name -> trivy.common.Layer + 32, // 26: trivy.common.CustomResource.data:type_name -> google.protobuf.Value + 20, // 27: 
trivy.common.Code.lines:type_name -> trivy.common.Line + 21, // 28: trivy.common.SecretFinding.code:type_name -> trivy.common.Code + 16, // 29: trivy.common.SecretFinding.layer:type_name -> trivy.common.Layer + 22, // 30: trivy.common.Secret.findings:type_name -> trivy.common.SecretFinding + 0, // 31: trivy.common.DetectedLicense.severity:type_name -> trivy.common.Severity + 1, // 32: trivy.common.DetectedLicense.category:type_name -> trivy.common.LicenseCategory.Enum + 2, // 33: trivy.common.LicenseFile.license_type:type_name -> trivy.common.LicenseType.Enum + 26, // 34: trivy.common.LicenseFile.fingings:type_name -> trivy.common.LicenseFinding + 16, // 35: trivy.common.LicenseFile.layer:type_name -> trivy.common.Layer + 1, // 36: trivy.common.LicenseFinding.category:type_name -> trivy.common.LicenseCategory.Enum + 18, // 37: trivy.common.Vulnerability.CvssEntry.value:type_name -> trivy.common.CVSS + 0, // 38: trivy.common.Vulnerability.VendorSeverityEntry.value:type_name -> trivy.common.Severity + 39, // [39:39] is the sub-list for method output_type + 39, // [39:39] is the sub-list for method input_type + 39, // [39:39] is the sub-list for extension type_name + 39, // [39:39] is the sub-list for extension extendee + 0, // [0:39] is the sub-list for field type_name } func init() { file_rpc_common_service_proto_init() } @@ -2878,7 +2950,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Misconfiguration); i { + switch v := v.(*Location); i { case 0: return &v.state case 1: @@ -2890,7 +2962,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*MisconfResult); i { + switch v := v.(*Misconfiguration); i { case 0: return &v.state case 1: @@ -2902,7 +2974,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PolicyMetadata); i { + switch v := v.(*MisconfResult); i { case 0: return &v.state case 1: @@ -2914,7 +2986,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DetectedMisconfiguration); i { + switch v := v.(*PolicyMetadata); i { case 0: return &v.state case 1: @@ -2926,7 +2998,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Vulnerability); i { + switch v := v.(*DetectedMisconfiguration); i { case 0: return &v.state case 1: @@ -2938,7 +3010,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DataSource); i { + switch v := v.(*Vulnerability); i { case 0: return &v.state case 1: @@ -2950,7 +3022,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Layer); i { + switch v := v.(*DataSource); i { case 0: return &v.state case 1: @@ -2962,7 +3034,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CauseMetadata); i { + switch v := v.(*Layer); i { case 0: return &v.state case 1: @@ -2974,7 +3046,7 @@ func 
file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CVSS); i { + switch v := v.(*CauseMetadata); i { case 0: return &v.state case 1: @@ -2986,7 +3058,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CustomResource); i { + switch v := v.(*CVSS); i { case 0: return &v.state case 1: @@ -2998,7 +3070,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Line); i { + switch v := v.(*CustomResource); i { case 0: return &v.state case 1: @@ -3010,7 +3082,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Code); i { + switch v := v.(*Line); i { case 0: return &v.state case 1: @@ -3022,7 +3094,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*SecretFinding); i { + switch v := v.(*Code); i { case 0: return &v.state case 1: @@ -3034,7 +3106,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Secret); i { + switch v := v.(*SecretFinding); i { case 0: return &v.state case 1: @@ -3046,7 +3118,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DetectedLicense); i { + switch v := v.(*Secret); i { case 0: return &v.state case 1: @@ -3058,7 +3130,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*LicenseFile); i { + switch v := v.(*DetectedLicense); i { case 0: return &v.state case 1: @@ -3070,7 +3142,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*LicenseFinding); i { + switch v := v.(*LicenseFile); i { case 0: return &v.state case 1: @@ -3082,7 +3154,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*LicenseCategory); i { + switch v := v.(*LicenseFinding); i { case 0: return &v.state case 1: @@ -3094,6 +3166,18 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*LicenseCategory); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_rpc_common_service_proto_msgTypes[25].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*LicenseType); i { case 0: return &v.state @@ -3112,7 +3196,7 @@ func file_rpc_common_service_proto_init() { GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_rpc_common_service_proto_rawDesc, NumEnums: 3, - NumMessages: 27, + NumMessages: 28, NumExtensions: 0, NumServices: 0, }, diff --git a/rpc/common/service.proto b/rpc/common/service.proto index f35d22051deb..d5c1472b4aef 100644 --- a/rpc/common/service.proto +++ b/rpc/common/service.proto @@ 
-42,17 +42,18 @@ message Package {
   string arch = 5;
   // src package containing some binary packages
   // e.g. bind
-  string src_name = 6;
-  string src_version = 7;
-  string src_release = 8;
-  int32 src_epoch = 9;
-  repeated string licenses = 15;
-  Layer layer = 11;
-  string file_path = 12;
-  repeated string depends_on = 14;
-  string digest = 16;
-  bool dev = 17;
-  bool indirect = 18;
+  string src_name = 6;
+  string src_version = 7;
+  string src_release = 8;
+  int32 src_epoch = 9;
+  repeated string licenses = 15;
+  repeated Location locations = 20;
+  Layer layer = 11;
+  string file_path = 12;
+  repeated string depends_on = 14;
+  string digest = 16;
+  bool dev = 17;
+  bool indirect = 18;
 }

 message PkgIdentifier {
@@ -60,6 +61,11 @@
   string bom_ref = 2;
 }

+message Location {
+  int32 start_line = 1;
+  int32 end_line = 2;
+}
+
 message Misconfiguration {
   string file_type = 1;
   string file_path = 2;
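As a quick illustration of the new `Location` message and the `repeated Location locations = 20` field on `Package`, here is a minimal Go sketch. It assumes the regenerated bindings in `github.com/aquasecurity/trivy/rpc/common` follow the usual protoc-gen-go naming (`common.Location` with `StartLine`/`EndLine`, and a `Locations` slice on `common.Package`); the package name, version, and line numbers are made-up example values, not taken from this change.

```go
package main

import (
	"fmt"

	common "github.com/aquasecurity/trivy/rpc/common"
)

func main() {
	// Hypothetical lock-file entry: the package declaration is assumed to
	// span lines 10-14 of the manifest, carried by the new repeated
	// `locations` field alongside the existing `file_path`.
	pkg := &common.Package{
		Name:     "com.example:library",
		Version:  "1.2.3",
		FilePath: "gradle.lockfile",
		Locations: []*common.Location{
			{StartLine: 10, EndLine: 14},
		},
	}

	// Each location pinpoints where the package is declared in the file.
	for _, loc := range pkg.Locations {
		fmt.Printf("%s declared at %s:%d-%d\n", pkg.Name, pkg.FilePath, loc.StartLine, loc.EndLine)
	}
}
```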