diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index 602bcc5c..4644e0fe 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -4,6 +4,26 @@ on: branches: - main jobs: + MarkdownLint: + runs-on: ubuntu-latest + if: github.repository == 'boozallen/sdp-libraries' + container: + image: davidanson/markdownlint-cli2:0.4.0 + options: --user root + steps: + - uses: actions/checkout@v2 + - name: markdownlint-cli2 + run: markdownlint-cli2 + Vale: + runs-on: ubuntu-latest + if: github.repository == 'boozallen/sdp-libraries' + container: + image: jdkato/vale:v2.18.0 + options: --user root + steps: + - uses: actions/checkout@v2 + - name: vale + run: vale docs libraries Unit_Test: runs-on: ubuntu-latest if: github.repository == 'boozallen/sdp-libraries' diff --git a/.markdownlint-cli2.yaml b/.markdownlint-cli2.yaml index 035326ab..79fadd4e 100644 --- a/.markdownlint-cli2.yaml +++ b/.markdownlint-cli2.yaml @@ -6,4 +6,10 @@ config: # sometimes you gotta be hacky no-inline-html: false -fix: true \ No newline at end of file +fix: true + +globs: + - "**.md" +ignores: + - docs/styles + - LICENSE.md \ No newline at end of file diff --git a/Justfile b/Justfile index ed854de3..1be4738a 100644 --- a/Justfile +++ b/Justfile @@ -56,14 +56,11 @@ serve: buildImage docker run --rm -p 8000:8000 -v $(pwd):/docs {{image}} serve -a 0.0.0.0:8000 --watch-theme # Lint the documentation -lint-docs: lint-prose lint-libraries lint-markdown +lint-docs: lint-prose lint-markdown # use Vale to lint the prose of the documentation lint-prose: - docker run --rm -v $(pwd):/app -w /app jdkato/vale docs - -lint-libraries: - docker run --rm -v $(pwd):/app -w /app jdkato/vale libraries + docker run --rm -v $(pwd):/app -w /app jdkato/vale docs libraries # use markdownlit to lint the docs lint-markdown: diff --git a/LICENSE.md b/LICENSE.md index abfead93..14f5dda2 100644 --- a/LICENSE.md +++ b/LICENSE.md @@ -1,21 +1,23 @@ -## Booz Allen Public License v1.0 - +## Booz Allen Public 
License v1.0 ### INTRODUCTION -The Booz Allen Public License allows government, non-profit academic, other non-profit, and commercial entities access to distinctive, disruptive, and robust code with the goal of Empowering People to Change the World℠. Products licensed under the Booz Allen Public License are founded on the basis that collective ingenuity can make the largest impact in the community. + +The Booz Allen Public License allows government, non-profit academic, other non-profit, and commercial entities access to distinctive, disruptive, and robust code with the goal of Empowering People to Change the World℠. Products licensed under the Booz Allen Public License are founded on the basis that collective ingenuity can make the largest impact in the community. ### DEFINITIONS + * **Commercial Entity.** “Commercial Entity” means any individual or entity other than a government, non-profit academic, or other non-profit entity. * **Derivative.** “Derivative” means any work of authorship in Source Code or Object Code form that results from an addition to, deletion from, or modification of the Source Code of the Product. * **License.** “License” means this Booz Allen Public License. * **Object Code.** “Object Code” means the form resulting from transformation or translation of Source Code into machine readable code, including but not limited to, compiled object code. -* **Originator.** “Originator” means each individual or legal entity that creates, contributes to the creation of, or owns the Product. -* **Patent Claims.** “Patent Claims” means any patent claim(s) in any patent to which Originator has a right to grant a license that would be infringed by Your making, using, selling, offering for sale, having made, or importing of the Product, but for the grant of this License. -* **Product.** “Product” means the Source Code of the software which the initial Originator made available under this License, and any Derivative of such Source Code. 
+* **Originator.** “Originator” means each individual or legal entity that creates, contributes to the creation of, or owns the Product. +* **Patent Claims.** “Patent Claims” means any patent claim(s) in any patent to which Originator has a right to grant a license that would be infringed by Your making, using, selling, offering for sale, having made, or importing of the Product, but for the grant of this License. +* **Product.** “Product” means the Source Code of the software which the initial Originator made available under this License, and any Derivative of such Source Code. * **Source Code.** “Source Code” means software in human-readable form. * **You.** “You” means either an individual or an entity (if you are taking this license on behalf of an entity) that exercises the rights granted under this License. ### LICENSE + **Government/Non-Profit Academic/Other Non-Profit.** This Section applies if You are not a Commercial Entity. @@ -26,24 +28,23 @@ This Section applies if You are not a Commercial Entity. **Commercial Entities**. This Section applies if You are a Commercial Entity. -* **License.** Subject to the terms and conditions of this License, each Originator hereby grants You a perpetual, worldwide, non-exclusive, royalty-free license to reproduce, display, perform, modify, distribute and otherwise use the Product and Derivatives, in Source Code and Object Code form, in accordance with the terms and conditions of this License for the sole purpose of Your internal business purposes and the provision of services to government, non-profit academic, and other non-profit entities. 
+* **License.** Subject to the terms and conditions of this License, each Originator hereby grants You a perpetual, worldwide, non-exclusive, royalty-free license to reproduce, display, perform, modify, distribute and otherwise use the Product and Derivatives, in Source Code and Object Code form, in accordance with the terms and conditions of this License for the sole purpose of Your internal business purposes and the provision of services to government, non-profit academic, and other non-profit entities. * **Distribution and Derivatives.** You may distribute to third parties copies of the Product, including any Derivative that You create, in Source Code or Object Code form. If You distribute copies of the Product, including any Derivative that You create, in Source Code form, such distribution must be under the terms of this License and You must inform recipients of the Source Code that the Product is governed under this License and how they can obtain a copy of this License. You may distribute to third parties copies of the Product, including any Derivative that You create, in Object Code form, or allow third parties to access or use the Product, including any Derivative that You create, under a license of Your choice, provided that You make available, and inform the recipient of such distribution how they can obtain, a copy of the Source Code thereof, at no charge, and inform the recipient of the Source Code that the Product is governed under this License and how they can obtain a copy of this License. -* **Commercial Sales.** You may not distribute, or allow third parties to access or use, the Product or any Derivative for a fee, unless You first obtain permission from the Originator. If Booz Allen Hamilton, please contact Booz Allen Hamilton at . - +* **Commercial Sales.** You may not distribute, or allow third parties to access or use, the Product or any Derivative for a fee, unless You first obtain permission from the Originator. 
If Booz Allen Hamilton, please contact Booz Allen Hamilton at . + **Patent Claim(s)**. -This Section applies regardless of whether You are a government, non-profit academic, or other non-profit entity or a Commercial Entity. +This Section applies regardless of whether You are a government, non-profit academic, or other non-profit entity or a Commercial Entity. + +* **Patent License.** Subject to the limitations in the Sections above, each Originator hereby grants You a perpetual, worldwide, non-exclusive, royalty-free license under Patent Claims of such Originator to make, use, sell, offer for sale, have made, and import the Product. The foregoing patent license does not apply (a) to any code that an Originator has removed from the Product, or (b) for infringement caused by Your modifications of the Product or the combination of any Derivative created by You or on Your behalf with other software. -* **Patent License.** Subject to the limitations in the Sections above, each Originator hereby grants You a perpetual, worldwide, non-exclusive, royalty-free license under Patent Claims of such Originator to make, use, sell, offer for sale, have made, and import the Product. The foregoing patent license does not apply (a) to any code that an Originator has removed from the Product, or (b) for infringement caused by Your modifications of the Product or the combination of any Derivative created by You or on Your behalf with other software. +### GENERAL TERMS -### GENERAL TERMS This Section applies regardless of whether You are a government, non-profit academic, or other non-profit entity or a Commercial Entity. * **Required Notices.** If You distribute the Product or a Derivative, in Object Code or Source Code form, You shall not remove or otherwise modify any proprietary markings or notices contained within or placed upon the Product or any Derivative. 
Any distribution of the Product or a Derivative, in Object Code or Source Code form, shall contain a clear and conspicuous Originator copyright and license reference in accordance with the below: - * *Unmodified Product Notice*: “This software package is licensed under the Booz Allen Public License. Copyright © 20__ [Copyright Holder Name]. All Rights Reserved.” - * *Derivative Notice*: “This software package is licensed under the Booz Allen Public License. Portions of this code are Copyright © 20__ [Copyright Holder Name]. All Rights Reserved.” + * *Unmodified Product Notice*: “This software package is licensed under the Booz Allen Public License. Copyright © 20__ [Copyright Holder Name]. All Rights Reserved.” + * *Derivative Notice*: “This software package is licensed under the Booz Allen Public License. Portions of this code are Copyright © 20__ [Copyright Holder Name]. All Rights Reserved.” * **Compliance with Laws.** You agree that You shall not reproduce, display, perform, modify, distribute and otherwise use the Product in any way that violates applicable law or regulation or infringes or violates the rights of others, including, but not limited to, third party intellectual property, privacy, and publicity rights. * **Disclaimer.** You understand that the Product is licensed to You, and not sold. The Product is provided on an “As Is” basis, without any warranties, representations, and guarantees, whether oral or written, express, implied or statutory, with regard to the Product, including without limitation, warranties of merchantability, fitness for a particular purpose, title, non-infringement, non-interference, and warranties arising from course of dealing or usage of trade, to the maximum extent permitted by applicable law. 
Originator does not warrant that (i) the Product will meet your needs; (ii) the Product will be error-free or accessible at all times; or (iii) the use or the results of the use of the Product will be correct, accurate, timely, or otherwise reliable. You acknowledge that the Product has not been prepared to meet Your individual requirements, whether or not such requirements have been communicated to Originator. You assume all responsibility for use of the Product. * **Limitation of Liability.** Under no circumstances and under no legal theory, whether tort (including negligence), contract, or otherwise, shall any Originator, or anyone who distributes the Product in accordance with this License, be liable to You for any direct, indirect, special, incidental, or consequential damages of any character including, without limitation, damages for lost profits, loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses, even if informed of the possibility of such damages. * **Severability.** If the application of any provision of this License to any particular facts or circumstances shall be held to be invalid or unenforceable, then the validity and enforceability of other provisions of this License shall not in any way be affected or impaired thereby. - - diff --git a/README.md b/README.md index af8851e4..78477c9d 100644 --- a/README.md +++ b/README.md @@ -2,13 +2,13 @@ This repository contains [Booz Allen's](https://boozallen.com) pipeline libraries that integrate with the [Jenkins Templating Engine](https://plugins.jenkins.io/templating-engine/). -If you want to learn more, the best place to get started is the [documentation](https://boozallen.github.io/sdp-docs/sdp-libraries/). +If you want to learn more, the best place to get started is the [documentation](https://boozallen.github.io/sdp-docs/sdp-libraries/). 
## Usage -In order to use the different libraries in this repository, you can configure this repository as a library source, for a detailed example of how to do this you may refer to [this lab](https://boozallen.github.io/sdp-docs/learning-labs/1/jte-the-basics/3-first-libraries.html#_configure_the_library_source). +In order to use the different libraries in this repository, you can configure this repository as a library source, for a detailed example of how to do this you may refer to [this lab](https://boozallen.github.io/sdp-docs/learning-labs/1/jte-the-basics/3-first-libraries.html#_configure_the_library_source). -It is recommended that rather than using the master branch you pin your library source to a particular github release such as: https://github.com/boozallen/sdp-libraries/tree/release/2.0/libraries [like 2.0]. This helps to ensure that you have greater control in version management. +It is recommended that rather than using the master branch you pin your library source to a particular github release such as: [like 2.0]. This helps to ensure that you have greater control in version management. Also ensure that in addition to whichever library you wish to use you include the `sdp` library. This helps to resolve a number of dependency errors you may otherwise face. 
diff --git a/build.gradle b/build.gradle index 3f9875dc..d838e83c 100644 --- a/build.gradle +++ b/build.gradle @@ -11,7 +11,7 @@ repositories { maven { url "http://repo.maven.apache.org/maven2" } } -version = 3.2 +version = 4.3 // determine test files def tests = [ "resources/test" ] diff --git a/catalog-info.yaml b/catalog-info.yaml new file mode 100644 index 00000000..facd90c3 --- /dev/null +++ b/catalog-info.yaml @@ -0,0 +1,24 @@ +apiVersion: backstage.io/v1alpha1 +kind: Component +metadata: + name: sdp-libraries + title: Solutions Delivery Platform (SDP) Libraries + description: "The Solutions Delivery Platform Pipeline Libraries for the Jenkins Templating Engine (JTE)" + annotations: + github.com/project-slug: boozallen/sdp-libraries + tags: + - pipeline + - sdp + - solutions-delivery-platform + - jenkins + - sonarqube + - devsecops + - devops + - supply-chain-security + links: + - url: https://boozallen.github.io/sdp-docs/sdp-libraries/ + title: Documentation Website +spec: + type: docs + lifecycle: production + owner: uip/uip-studio diff --git a/docs/glossary.md b/docs/glossary.md index b7a257a1..2d4ada66 100644 --- a/docs/glossary.md +++ b/docs/glossary.md @@ -14,4 +14,5 @@ *[PR]: Pull Request *[JSON]: JavaScript Object Notation *[CVE]: Common Vulnerabilities and Exposures -*[CLI]: Command Line Interface \ No newline at end of file +*[CLI]: Command Line Interface +*[SBOM]: Software Bill of Materials \ No newline at end of file diff --git a/docs/styles/Microsoft/HeadingAcronyms.yml b/docs/styles/Microsoft/HeadingAcronyms.yml deleted file mode 100644 index 9dc3b6c2..00000000 --- a/docs/styles/Microsoft/HeadingAcronyms.yml +++ /dev/null @@ -1,7 +0,0 @@ -extends: existence -message: "Avoid using acronyms in a title or heading." 
-link: https://docs.microsoft.com/en-us/style-guide/acronyms#be-careful-with-acronyms-in-titles-and-headings -level: warning -scope: heading -tokens: - - '[A-Z]{2,4}' diff --git a/docs/styles/Vocab/SDP/accept.txt b/docs/styles/Vocab/SDP/accept.txt index 1ef173ef..7410cc9f 100644 --- a/docs/styles/Vocab/SDP/accept.txt +++ b/docs/styles/Vocab/SDP/accept.txt @@ -40,4 +40,10 @@ Splunk [Rr]etag(|s|ging) [Dd]ockerfiles? Anchore -[Pp]arsable \ No newline at end of file +[Pp]arsable +[Ss]yft +(SBOM|sbom)s? +[gG]rype +(json|JSON) +(cli|CLI) +snake_case diff --git a/libraries/docker_compose/README.md b/libraries/docker_compose/README.md index efae6ab6..f9066343 100644 --- a/libraries/docker_compose/README.md +++ b/libraries/docker_compose/README.md @@ -10,9 +10,9 @@ This library allows you to perform docker compose commands. --- -| Step | Description | -| ----------- | ----------- | -| `up()` | Runs `docker-compose up` with values taken from the configuration. | +| Step | Description | +|----------|----------------------------------------------------------------------| +| `up()` | Runs `docker-compose up` with values taken from the configuration. | | `down()` | Runs `docker-compose down` with values taken from the configuration. | ## Example Usage @@ -28,13 +28,13 @@ compose.down() --- -The library configurations for docker_compose are as follows: +The library configurations for `docker_compose` are as follows: -| Parameter | Description | -| ----------- | ----------- | -| `files` | Optional list of ordered docker compose files to run. Omitting this parameter causes the command `docker-compose up` to run on a file named `docker-compose.yml`. | -| `env` | Optional environment file to pass to the docker-compose command. | -| `sleep` | Optional configuration that controls how long to wait after running the `up()` command before continuing the pipeline execution. This is helpful when the Docker containers need to be started before other steps, like integration tests, may run. 
| +| Parameter | Description | +|-----------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| `files` | Optional list of ordered docker compose files to run. Omitting this parameter causes the command `docker-compose up` to run on a file named `docker-compose.yml`. | +| `env` | Optional environment file to pass to the docker-compose command. | +| `sleep` | Optional configuration that controls how long to wait after running the `up()` command before continuing the pipeline execution. This is helpful when the Docker containers need to be started before other steps, like integration tests, may run. | ## Example Library Configuration diff --git a/libraries/dotnet/README.md b/libraries/dotnet/README.md index 1d1f2af5..8920dd72 100644 --- a/libraries/dotnet/README.md +++ b/libraries/dotnet/README.md @@ -1,23 +1,24 @@ --- -description: This library allows you to perform .NET build and test commands in the SDP dotnet-sdk:5.0 agent container +description: This library allows you to perform .NET build and test commands in the SDP dotnet-sdk agent container --- # DotNet -This library allows you to perform .NET build and test commands in the SDP `dotnet-sdk:5.0` agent container. +This library allows you to perform .NET build and test commands in the SDP `dotnet-sdk` agent container. ## Steps -| Step | Description | -| ----------- | ----------- | -| `source_build` | This step leverages the `dotnet publish` command to build your application and output the results to the specified directory via `outDir` variable. `outDir` defaults to a folder named "bin." The specified folder is archived as a Jenkins artifact. 
| -| `unit_test` | This step leverages the `dotnet test` command to run the unit, integration and functional tests specified in the application repository and outputs the results to a specified directory via `resultDir` variable. `resultDir` defaults to a folder named "coverage." The specified folder is archived as a Jenkins artifact.| +| Step | Description | +| -------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `source_build` | This step leverages the `dotnet publish` command to build your application and output the results to the specified directory via `outDir` variable. `outDir` defaults to a folder named "bin." The specified folder is archived as a Jenkins artifact. | +| `unit_test` | This step leverages the `dotnet test` command to run the unit, integration and functional tests specified in the application repository and outputs the results to a specified directory via `resultDir` variable. `resultDir` defaults to a folder named "coverage." The specified folder is archived as a Jenkins artifact. 
| ## Configuration ``` groovy title='pipeline_config.groovy' libraries { dotnet { + sdk_image = 'dotnet-sdk:6.0.106' source_build { outDir = "applicationOutput" } @@ -31,4 +32,4 @@ libraries { ## Dependencies * The SDP library -* Access to the `dotnet-sdk:5.0` build agent container via the repository defined in your SDP library configuration +* Access to a `dotnet-sdk` build agent container via the repository defined in your SDP library configuration diff --git a/libraries/dotnet/library_config.groovy b/libraries/dotnet/library_config.groovy new file mode 100644 index 00000000..a218e669 --- /dev/null +++ b/libraries/dotnet/library_config.groovy @@ -0,0 +1,24 @@ +fields{ + required{ + } + optional{ + image = String + unity_credential_id = String + unity_serial_id = String + unity_app = Boolean + run_sca = Boolean + activate_license_parameters = String[] + build_unity_parameters = String[] + workspace_name = String + // above was previously in required + wait_for_quality_gate = Boolean + enforce_quality_gate = Boolean + credential_id = String + sonar_token = String + stage_display_name = String + timeout_duration = Number + timeout_unit = [ "NANOSECONDS", "MICROSECONDS", "MILLISECONDS", "SECONDS", "MINUTES", "HOURS", "DAYS" ] + cli_parameters = List + unstash = List + } +} \ No newline at end of file diff --git a/libraries/dotnet/steps/build_dotnet.groovy b/libraries/dotnet/steps/build_dotnet.groovy new file mode 100644 index 00000000..39186769 --- /dev/null +++ b/libraries/dotnet/steps/build_dotnet.groovy @@ -0,0 +1,44 @@ +/* + Copyright © 2018 Booz Allen Hamilton. All Rights Reserved. + This software package is licensed under the Booz Allen Public License. 
The license can be found in the License file or at http://boozallen.github.io/licenses/bapl +*/ + +package libraries.dotnet.steps +import jenkins.model.Jenkins + +void call() { + + // default values for config options + LinkedHashMap defaults = [ + image: "dotnet-sonar-scanner:5.2.2-1.1", + stage_display_name: "Dotnet Build", + cli_parameters: [] // does it makes sense to allow people to pass additional params? + ] + + // sets image to use + String image = config.image ?: defaults.image + + // purely aesthetic. the name of the "Stage" for this task. + String stage_display_name = config.stage_display_name ?: defaults.stage_display_name + + stage(stage_display_name) { + // Need to move container to SDP. + // using same container so this is no longer needed?? + inside_sdp_image image, { + + // fetch the source code + unstash "workspace" + + // build the build command + ArrayList dotnet_build_command = [ "dotnet build" ] + dotnet_build_command << (config.cli_parameters ?: defaults.cli_parameters) + // run dotnet build on sln + sh dotnet_build_command.flatten().join(" ") + + + + // stash build results + stash "workspace" + } + } +} diff --git a/libraries/dotnet/steps/build_source.groovy b/libraries/dotnet/steps/build_source.groovy new file mode 100644 index 00000000..023f7d21 --- /dev/null +++ b/libraries/dotnet/steps/build_source.groovy @@ -0,0 +1,35 @@ +/* + Copyright © 2018 Booz Allen Hamilton. All Rights Reserved. + This software package is licensed under the Booz Allen Public License. 
The license can be found in the License file or at http://boozallen.github.io/licenses/bapl +*/ + +package libraries.dotnet.steps +import jenkins.model.Jenkins + +void call() { + + // default values for config options + LinkedHashMap defaults = [ + unity_app: false + ] + + // whether or not this is a unity build + Boolean unity_app = defaults.unity_app + if(config.containsKey("unity_app")){ + unity_app = config.unity_app + } + + if (jte.libraries.sonarqube) { + println "Skipping this step, build occurs during static code analysis." +} +else { + if (unity_app) { + build_unity() + } + + build_dotnet() +} + + + +} \ No newline at end of file diff --git a/libraries/dotnet/steps/build_unity.groovy b/libraries/dotnet/steps/build_unity.groovy new file mode 100644 index 00000000..af2471c6 --- /dev/null +++ b/libraries/dotnet/steps/build_unity.groovy @@ -0,0 +1,66 @@ +/* + Copyright © 2018 Booz Allen Hamilton. All Rights Reserved. + This software package is licensed under the Booz Allen Public License. 
The license can be found in the License file or at http://boozallen.github.io/licenses/bapl +*/ + +package libraries.dotnet.steps +import jenkins.model.Jenkins +import com.cloudbees.plugins.credentials.Credentials +import com.cloudbees.plugins.credentials.CredentialsProvider +import com.cloudbees.plugins.credentials.impl.UsernamePasswordCredentialsImpl +import org.jenkinsci.plugins.plaincredentials.impl.StringCredentialsImpl + +void call() { + + // default values for config options + // TODO: make the untiy creds required params and remove these defualt values + // print error if null, there are examples of doing this in other libs + LinkedHashMap defaults = [ + image: "unity:ubuntu-2020.3.30f1-base-1.0.1-1.1", + stage_display_name: "Unity Build", + unity_credential_id: "unitycreds", + unity_serial_id: "unityserial", + activate_license_parameters: [ "-nographics", "-logFile=/dev/stdout"], + build_unity_parameters: [ "-nographics", "-logFile=/dev/stdout" ] + ] + + // credential ID for Unity license + String unity_credential_id = config.unity_credential_id ?: defaults.unity_credential_id + + // credential ID for Unity serial + String unity_serial_id = config.unity_serial_id ?: defaults.unity_serial_id + + // sets image to use + String image = config.image ?: defaults.image + + // purely aesthetic. the name of the "Stage" for this task. + String stage_display_name = config.stage_display_name ?: defaults.stage_display_name + + stage(stage_display_name) { + inside_sdp_image image, { + withCredentials([ usernamePassword(credentialsId: unity_credential_id, usernameVariable: 'USERNAME', passwordVariable: 'PASSWORD'), + string(credentialsId: unity_serial_id, variable: 'SERIAL')]) { // can this be a secret credential type? Should variable name be a configurable via config file? 
+ // base activate license command to execute + unstash "workspace" + + ArrayList activate_license_command = [ "unity-editor -username '${USERNAME}' -password '${PASSWORD}' -serial '${SERIAL}' -projectPath=${workspace} -quit" ] + // join user provided params + activate_license_command << (config.activate_license_parameters ?: defaults.activate_license_parameters) + // Activate Unity License + sh activate_license_command.flatten().join(" ") + + // base build unity command to execute + // TODO: -projectPath=${workspace} *** seems the solution inherits this name, is this configurable via parameters? ** + ArrayList build_unity_command = [ "unity-editor -projectPath=${workspace} -executeMethod UnityEditor.SyncVS.SyncSolution -quit" ] + // join user provided unity build params + build_unity_command << (config.build_unity_parameters ?: defaults.build_unity_parameters) + // build the Unity solution + sh build_unity_command.flatten().join(" ") + + // stash build results + stash "workspace" + + } + } + } +} diff --git a/libraries/dotnet/steps/dotnet_invoke.groovy b/libraries/dotnet/steps/dotnet_invoke.groovy index 40b0596d..a1025315 100644 --- a/libraries/dotnet/steps/dotnet_invoke.groovy +++ b/libraries/dotnet/steps/dotnet_invoke.groovy @@ -11,6 +11,8 @@ void call() { String outDir = "" String resultDir = "" + String sdkImage = config?.sdk_image ?: "dotnet-sdk:latest" + switch(stepContext.name) { case "source_build": stepName = "DotNet Build" @@ -25,7 +27,7 @@ void call() { } stage(stepName) { - inside_sdp_image "dotnet-sdk:5.0.214", { + inside_sdp_image "${sdkImage}", { unstash "workspace" if (stepName == "DotNet Build") { diff --git a/libraries/dotnet/test/BuildDotnetSpec.groovy b/libraries/dotnet/test/BuildDotnetSpec.groovy new file mode 100644 index 00000000..5b4c2d56 --- /dev/null +++ b/libraries/dotnet/test/BuildDotnetSpec.groovy @@ -0,0 +1,59 @@ +/* + Copyright © 2018 Booz Allen Hamilton. All Rights Reserved. 
+ This software package is licensed under the Booz Allen Public License. The license can be found in the License file or at http://boozallen.github.io/licenses/bapl +*/ + +package libraries.dotnet + +public class BuildDotnetSpec extends JTEPipelineSpecification { + + + def DotNetBuild = null + + LinkedHashMap minimalUnitTestConfig = [ + unit_test: [ + stepName: "unit_test", + resultDir: "test" + ] + ] + + def setup() { + explicitlyMockPipelineStep("inside_sdp_image") + explicitlyMockPipelineStep("login_to_registry") + explicitlyMockPipelineVariable("workspace") + + DotNetBuild = loadPipelineScriptForStep("dotnet", "build_dotnet") + } + + def "Ustash" () { + setup: + DotNetBuild = loadPipelineScriptForStep("dotnet", "build_dotnet") + DotNetBuild.getBinding().setVariable("config", [unit_test: [resultDir: "test"]]) + when: + DotNetBuild() + then: + 1 * getPipelineMock("unstash").call('workspace') + } + + + def "Unit tests run successfully" () { + setup: + def sharedLib = loadPipelineScriptForTest("dotnet/steps/build_dotnet.groovy") + sharedLib.getBinding().setVariable("BRANCH_NAME", "master") + DotNetBuild.getBinding().setVariable("config", [unit_test: [resultDir: "test"]]) + when: + DotNetBuild() + then: + noExceptionThrown() + 1 * getPipelineMock("sh").call('dotnet build') + 1 * getPipelineMock("unstash").call('workspace') + 1 * getPipelineMock("stage").call('Dotnet Build', _) + 1 * getPipelineMock("inside_sdp_image").call('dotnet-sonar-scanner:5.2.2-1.1', _) + 1 * getPipelineMock("sh").toString() + 1 * getPipelineMock("stash").toString() + 1 * getPipelineMock("unstash").toString() + 1 * getPipelineMock("stage").toString() + 1 * getPipelineMock("inside_sdp_image").toString() + 1 * getPipelineMock("stash").call('workspace') + } +} diff --git a/libraries/dotnet/test/BuildSourceSpec.groovy b/libraries/dotnet/test/BuildSourceSpec.groovy new file mode 100644 index 00000000..51842cac --- /dev/null +++ b/libraries/dotnet/test/BuildSourceSpec.groovy @@ -0,0 +1,100 @@ +/* + 
Copyright © 2018 Booz Allen Hamilton. All Rights Reserved. + This software package is licensed under the Booz Allen Public License. The license can be found in the License file or at http://boozallen.github.io/licenses/bapl +*/ + +package libraries.dotnet + +public class BuildSourceSpec extends JTEPipelineSpecification { + def BuildSource = null + + // expect lib to call build_unity() then build_dotnet() if unity_app == true + + // expect lib to call build_dotnet() only if unity_app == false + + LinkedHashMap minimalSourceBuildConfig = [ + source_build: [ + stepName: "source_build", + outDir: "OutTest" + ] + ] + LinkedHashMap minimalUnitTestConfig = [ + unit_test: [ + stepName: "unit_test", + resultDir: "test" + ] + ] + + def setup() { + explicitlyMockPipelineStep("inside_sdp_image") + explicitlyMockPipelineStep("login_to_registry") + explicitlyMockPipelineStep("build_dotnet") + explicitlyMockPipelineVariable("workspace") + + BuildSource = loadPipelineScriptForStep("dotnet", "build_source") + } + + // would like to go over this with Conner. + /* def "Lib to print skip if sonarqube lib is loaded" () { + setup: + BuildSource.getBinding().setVariable("config", [unit_test: [resultDir: "test"]]) + BuildSource.getBinding().setVariable("unity_app", "true") + when: + def result = BuildSource() + //def result = BuildSource.if(a,b) + then: + //1 * getPipelineMock("if").call('Skipping this step, build occurs during static code analysis.') + // Use + 1 * getPipelineMock("config").call() + result == expected + where: + a | b || expected + null | null || false + "" | "" || false + "test" | "foo" || true + } */ + + def "Is jte.libraries.dotnet library loaded?" 
() { // test definition + setup: + boolean jteLibraryLoaded = true + when: + boolean resultIfLibraryLoaded = jteLibraryLoaded + then: + resultIfLibraryLoaded == true // implicit assertion + } + + + /* def "Skip step if SonarQube library is loaded" () { + setup: + BuildSource.getBinding().setVariable() + when: + BuildSource() + then: + 0 * getPipelineMock("build_dotnet").toString() + 0 * getPipelineMock("build_dotnet").call() + } */ + + /* def "Build dotnet" () { + setup: + BuildSource.getBinding().setVariable("config", [unit_test: [resultDir: "test"]]) + BuildSource.getBinding().setVariable("unity_app", "false") + when: + BuildSource() + then: + noExceptionThrown() + 1 * getPipelineMock("build_dotnet").toString() + 1 * getPipelineMock("build_dotnet").call() + } */ + + /* def "Unit tests run successfully" () { + setup: + BuildSource.getBinding().setVariable("config", [unit_test: [resultDir: "test"]]) + when: + BuildSource() + then: + noExceptionThrown() + 1 * getPipelineMock("build_dotnet").toString() + 1 * getPipelineMock("build_dotnet").call() + } . */ + +} diff --git a/libraries/dotnet/test/BuildUnitySpec.groovy b/libraries/dotnet/test/BuildUnitySpec.groovy new file mode 100644 index 00000000..0eac91c5 --- /dev/null +++ b/libraries/dotnet/test/BuildUnitySpec.groovy @@ -0,0 +1,92 @@ +/* + Copyright © 2018 Booz Allen Hamilton. All Rights Reserved. + This software package is licensed under the Booz Allen Public License. 
The license can be found in the License file or at http://boozallen.github.io/licenses/bapl +*/ + +package libraries.dotnet + +public class BuildUnitySpec extends JTEPipelineSpecification { + + + def UnityBuild = null + def UnityBuild2 = null + + LinkedHashMap minimalUnitTestConfig = [ + unit_test: [ + stepName: "unit_test", + resultDir: "test" + ] + ] + + def setup() { + explicitlyMockPipelineStep("inside_sdp_image") + explicitlyMockPipelineStep("login_to_registry") + explicitlyMockPipelineVariable("USERNAME") + explicitlyMockPipelineVariable("PASSWORD") + explicitlyMockPipelineVariable("SERIAL") + explicitlyMockPipelineVariable("workspace") + + UnityBuild = loadPipelineScriptForStep("dotnet", "build_unity") + } + + def "Shared Library Variables" () { + setup: + def MyFunction = loadPipelineScriptForTest("dotnet/steps/build_unity.groovy") + when: + MyFunction.getBinding().setVariable("BRANCH_NAME", "master") + then: + 0 * getPipelineMock("unstash").call('workspace') + } + + def "Ustash" () { + setup: + UnityBuild = loadPipelineScriptForStep("dotnet", "build_unity") + UnityBuild.getBinding().setVariable("config", [unit_test: [resultDir: "test"]]) + when: + UnityBuild() + then: + 1 * getPipelineMock("unstash").call('workspace') + } + + def "Credentials" () { + setup: + UnityBuild = loadPipelineScriptForStep("dotnet", "build_unity") + UnityBuild.getBinding().setVariable("config", [unit_test: [resultDir: "test"]]) + when: + UnityBuild() + then: + 1 * getPipelineMock("usernamePassword.call").call(['credentialsId':'unitycreds', 'usernameVariable':'USERNAME', 'passwordVariable':'PASSWORD']) + + } + + + def "Unit tests run successfully" () { + setup: + def sharedLib = loadPipelineScriptForTest("dotnet/steps/build_unity.groovy") + sharedLib.getBinding().setVariable("BRANCH_NAME", "master") + UnityBuild.getBinding().setVariable("config", [unit_test: [resultDir: "test"]]) + when: + UnityBuild() + then: + noExceptionThrown() + 2 * getPipelineMock("sh").toString() + 1 * 
getPipelineMock("stage").call('Unity Build', _) + 1 * getPipelineMock("string.call").call(['credentialsId':'unityserial', 'variable':'SERIAL']) + 1 * getPipelineMock("usernamePassword.call").call(['credentialsId':'unitycreds', 'usernameVariable':'USERNAME', 'passwordVariable':'PASSWORD']) + 1 * getPipelineMock("inside_sdp_image").call('unity:ubuntu-2020.3.30f1-base-1.0.1-1.1', _) + 1 * getPipelineMock("withCredentials").call([null, null], _) + 1 * getPipelineMock("stash").toString() + 1 * getPipelineMock("usernamePassword.call").toString() + 1 * getPipelineMock("unstash").toString() + 1 * getPipelineMock("stage").toString() + 1 * getPipelineMock("inside_sdp_image").toString() + 1 * getPipelineMock("withCredentials").toString() + 1 * getPipelineMock("string.call").toString() + 1 * getPipelineMock("sh").call('unity-editor -projectPath=Mock Generator for [workspace] -executeMethod UnityEditor.SyncVS.SyncSolution -quit -nographics -logFile=/dev/stdout') + 1 * getPipelineMock("sh").call('unity-editor -username \'Mock Generator for [USERNAME]\' -password \'Mock Generator for [PASSWORD]\' -serial \'Mock Generator for [SERIAL]\' -projectPath=Mock Generator for [workspace] -quit -nographics -logFile=/dev/stdout') + 1 * getPipelineMock("stash").call('workspace') + } + + + +} diff --git a/libraries/git/README.md b/libraries/git/README.md index 484dfcc3..048ed385 100644 --- a/libraries/git/README.md +++ b/libraries/git/README.md @@ -114,7 +114,7 @@ on_merge_request from: /^[Ff]eature-.*/, to: develop, { } ``` -### Example using on_change +### Example using `on_change` ```groovy on_change{ diff --git a/libraries/grype/README.md b/libraries/grype/README.md new file mode 100644 index 00000000..8d741fc1 --- /dev/null +++ b/libraries/grype/README.md @@ -0,0 +1,37 @@ +--- +description: Uses the Grype CLI to scan container images for vulnerabilities. +--- + +# Grype + +Uses the [Grype CLI](https://github.com/anchore/grype) to scan container images for vulnerabilities. 
+ +## Steps + +| Step | Description | +|------------------------|------------------------------------------------------------| +| container_image_scan() | Performs the Grype scan against your scaffold build image. | + +## Configuration + +| Library Configuration | Description | Type | Default Value | Options | +|-----------------------|----------------------------------------------------------|--------|---------------|---------------------------------------------------| +| `grype_container` | The container image to execute the scan within | String | grype:0.38.0 | | +| `report_format` | The output format of the generated report | String | json | `json`, `table`, `cyclonedx`, `template` | +| `fail_on_severity` | The severity level threshold that will fail the pipeline | String | high | `none`, `negligible`, `low`, `medium`, `high`, `critical` | +| `grype_config` | A custom path to a grype configuration file | String | `null` | | + +## Grype Configuration File + +If `grype_config` isn't provided, the default locations for an application are `.grype.yaml`, `.grype/config.yaml`. + +!!! note "Learn More About Grype Configuration" + + Read [the grype docs](https://github.com/anchore/grype#configuration) to learn more about the Grype configuration file + +## Dependencies + +--- + +* This library requires that the `docker` library also be loaded and `build()` be invoked before `container_image_scan()` +* If the default `grype_container` is replaced, it must be able to run docker containers (packages: docker-ce, docker-ce-cli and containerd.io). 
diff --git a/libraries/grype/library_config.groovy b/libraries/grype/library_config.groovy new file mode 100644 index 00000000..dc6a1389 --- /dev/null +++ b/libraries/grype/library_config.groovy @@ -0,0 +1,8 @@ +fields{ + optional{ + grype_container = String + report_format = ["json", "table", "cyclonedx", "template"] + fail_on_severity = ["none", "negligible", "low", "medium", "high", "critical"] + grype_config = String + } +} diff --git a/libraries/grype/resources/transform-grype-scan-results.sh b/libraries/grype/resources/transform-grype-scan-results.sh new file mode 100755 index 00000000..f5689730 --- /dev/null +++ b/libraries/grype/resources/transform-grype-scan-results.sh @@ -0,0 +1,53 @@ +#!/bin/bash + +RAW_RESULTS=$1 +GRYPE_CONFIG=$2 + +# show whitelist count +WHITELIST_COUNT=$(cat $GRYPE_CONFIG | python3 -m yq -r '.ignore | length') +echo "${WHITELIST_COUNT} CVE(s) were whitelisted." +printf "The whitelist can be found in $GRYPE_CONFIG.\n\n" + +# transform the results into an organized array +cat "$RAW_RESULTS" \ + | jq -r ' + def severity_to_number: + { + "Critical": 0, + "High": 1, + "Medium": 2, + "Low": 3, + "None": 4, + }[.]; + + .matches + | map(. | { + cve: .vulnerability.id, + severity: .vulnerability.severity, + package: .artifact.name, + version: .artifact.version, + type: .artifact.type, + location: .artifact.locations[].path, + url: .vulnerability.dataSource + }) + | sort_by([(.severity | severity_to_number), .package])' \ + > transformed-results.json + +# get the CVE count +CVE_COUNT=$(cat transformed-results.json | jq -r 'length') + +if [ "$CVE_COUNT" -eq "0" ] +then + echo "No CVEs detected! 
:)" +else + # transform the results into table columns + cat transformed-results.json \ + | jq -r ' + map(join("|")) + | .[]' \ + > results.txt + + # display results as a table + echo -e "Vulnerability|Severity|Package|Version|Type|Location|Link\n$(cat results.txt)" \ + | column -t -s "|" +fi diff --git a/libraries/grype/steps/container_image_scan.groovy b/libraries/grype/steps/container_image_scan.groovy new file mode 100644 index 00000000..a690e9a7 --- /dev/null +++ b/libraries/grype/steps/container_image_scan.groovy @@ -0,0 +1,115 @@ +package libraries.grype.steps + +void call() { + stage("Grype Image Scan") { + String grypeContainer = config?.grype_container ?: "grype:0.38.0" + String outputFormat = config?.report_format ?: 'json' + String severityThreshold = config?.fail_on_severity ?: 'high' + String grypeConfig = config?.grype_config + String resultsFileFormat = ".txt" + String ARGS = "" + // is flipped to True if an image scan fails + Boolean shouldFail = false + + if (outputFormat != null) { + ARGS += "-o ${outputFormat} " + if (outputFormat == 'json') { + resultsFileFormat = '.json' + } + else if (outputFormat == 'cyclonedx') { + resultsFileFormat = '.xml' + } + else if (outputFormat == 'template') { + //placeholder for custom template format + resultsFileFormat = '.template' + } + } + + if (severityThreshold != "none") { + ARGS += "--fail-on ${severityThreshold} " + } + + inside_sdp_image(grypeContainer){ + login_to_registry{ + unstash "workspace" + + // Gets environment variable and sets it to a groovy var + String HOME = sh (script: 'echo $HOME', returnStdout: true).trim() + + // Gets environment variable and sets it to a groovy var + String XDG = sh (script: 'echo $XDG_CONFIG_HOME', returnStdout: true).trim() + + if (grypeConfig != null) { + ARGS += "--config ${grypeConfig}" + echo "Grype file explicitly specified in pipeline_config.groovy" + } + else if (fileExists('.grype.yaml')) { + grypeConfig = '.grype.yaml' + ARGS += "--config ${grypeConfig}" + 
echo "Found .grype.yaml" + } + else if (fileExists('.grype/config.yaml')) { + grypeConfig = '.grype/config.yaml' + ARGS += "--config ${grypeConfig}" + echo "Found .grype/config.yaml" + } + else if (fileExists("${HOME}/.grype.yaml")) { + grypeConfig = "${HOME}/.grype.yaml" + ARGS += "--config ${grypeConfig}" + echo "Found ~/.grype.yaml" + } + else if (fileExists("${XDG}/grype/config.yaml")) { + grypeConfig = "${XDG}/grype/config.yaml" + ARGS += "--config ${grypeConfig}" + echo "Found /grype/config.yaml" + } + + def images = get_images_to_build() + images.each { img -> + // Use $img.repo to help name our results uniquely. Checks to see if a forward slash exists and splits the string at that location. + String rawResultsFile, transformedResultsFile + if (img.repo.contains("/")) { + String[] repoImageName = img.repo.split('/') + rawResultsFile = repoImageName[1] + '-grype-scan-results' + resultsFileFormat + transformedResultsFile = repoImageName[1] + '-grype-scan-results.txt' + } + else { + rawResultsFile = "${img.repo}-grype-scan-results" + resultsFileFormat + transformedResultsFile = "${img.repo}-grype-scan-results.txt" + } + + // perform the grype scan + try { + sh "grype ${img.registry}/${img.repo}:${img.tag} ${ARGS} >> ${rawResultsFile}" + } + // Catch the error on quality gate failure + catch(Exception err) { + shouldFail = true + echo "Failed: ${err}" + echo "Grype Quality Gate Failed. There are one or more CVE's that exceed the maximum allowed severity rating!" + } + // display the results in a human-readable format + finally { + //Specific to BASS team. Allows Backstage to ingest JSON but also creates a human readable artifact. 
+ if (outputFormat == "json" && grypeConfig != null) { + def transform_script = resource("transform-grype-scan-results.sh") + writeFile file: "transform-results.sh", text: transform_script + def transformed_results = sh script: "/bin/bash ./transform-results.sh ${rawResultsFile} ${grypeConfig}", returnStdout: true + writeFile file: transformedResultsFile, text: transformed_results.trim() + // archive the results + archiveArtifacts artifacts: "${rawResultsFile}, ${transformedResultsFile}", allowEmptyArchive: true + } + else { + archiveArtifacts artifacts: "${rawResultsFile}", allowEmptyArchive: true + } + } + } + } + stash "workspace" + + if(shouldFail){ + error "One or more image scans with Grype failed" + } + } + } +} diff --git a/libraries/grype/test/ContainerImageScanSpec.groovy b/libraries/grype/test/ContainerImageScanSpec.groovy new file mode 100644 index 00000000..9757ced7 --- /dev/null +++ b/libraries/grype/test/ContainerImageScanSpec.groovy @@ -0,0 +1,212 @@ +/* + Copyright © 2018 Booz Allen Hamilton. All Rights Reserved. + This software package is licensed under the Booz Allen Public License. 
The license can be found in the License file or at http://boozallen.github.io/licenses/bapl +*/ + +package libraries.grype +import JTEPipelineSpecification + + +public class ContainerImageScanSpec extends JTEPipelineSpecification { + + def ContainerImageScan = null + + def setup() { + ContainerImageScan = loadPipelineScriptForStep("grype", "container_image_scan") + ContainerImageScan.getBinding().setVariable("config", [:]) + String grypeConfig = "" + explicitlyMockPipelineStep("inside_sdp_image") + explicitlyMockPipelineStep("login_to_registry") + explicitlyMockPipelineStep("get_images_to_build") + getPipelineMock("sh")([script: 'echo $HOME', returnStdout: true]) >> "/home" + getPipelineMock("sh")([script: 'echo $XDG_CONFIG_HOME', returnStdout: true]) >> "/xdg" + + getPipelineMock("get_images_to_build")() >> { + def images = [] + images << [registry: "test_registry", repo: "image1_repo", context: "image1", tag: "4321dcba"] + images << [registry: "test_registry", repo: "image2_repo", context: "image2", tag: "4321dcbb"] + images << [registry: "test_registry", repo: "image3_repo/qwerty", context: "image3", tag: "4321dcbc"] + return images + } + + } + + def "Unstash workspace before scanning images" () { + + when: + ContainerImageScan() + then: + 1 * getPipelineMock("unstash")("workspace") + then: + (1.._) * getPipelineMock("sh")({it =~ /^grype */}) + } + + def "Login to registry to scan images" () { + when: + ContainerImageScan() + then: + 1 * getPipelineMock("login_to_registry")(_) + then: + (1.._) * getPipelineMock("sh")({it =~ /^grype */}) + } + + def "Grype config is given in pipeline_config.groovy" () { + given: + ContainerImageScan.getBinding().setVariable("config", [grype_config: "/testPath/grype.yaml"]) + explicitlyMockPipelineStep('resource') + getPipelineMock("sh")([script: "/bin/bash ./transform-results.sh image1_repo-grype-scan-results.json /testPath/grype.yaml", returnStdout:true]) >> "test.txt " + getPipelineMock("sh")([script: "/bin/bash 
./transform-results.sh image2_repo-grype-scan-results.json /testPath/grype.yaml", returnStdout: true ]) >> "test.txt " + getPipelineMock("sh")([script: "/bin/bash ./transform-results.sh qwerty-grype-scan-results.json /testPath/grype.yaml", returnStdout: true ]) >> "test.txt " + when: + ContainerImageScan() + then: + 1 * getPipelineMock("echo")("Grype file explicitly specified in pipeline_config.groovy") + (1.._) * getPipelineMock("sh")({it =~ /^grype .* --config \/testPath\/grype.yaml >> .*/}) + } + + def "Grype config is found at current dir .grype.yaml" () { + given: + explicitlyMockPipelineStep('resource') + getPipelineMock("sh")([script: "/bin/bash ./transform-results.sh image1_repo-grype-scan-results.json .grype.yaml", returnStdout:true]) >> "test.txt " + getPipelineMock("sh")([script: "/bin/bash ./transform-results.sh image2_repo-grype-scan-results.json .grype.yaml", returnStdout: true ]) >> "test.txt " + getPipelineMock("sh")([script: "/bin/bash ./transform-results.sh qwerty-grype-scan-results.json .grype.yaml", returnStdout: true ]) >> "test.txt " + when: + ContainerImageScan() + then: + 1 * getPipelineMock("fileExists")(".grype.yaml") >> true + 1 * getPipelineMock("echo")("Found .grype.yaml") + then: + (1.._) * getPipelineMock("sh")({it =~ /^grype .* --config .grype.yaml >> .*/}) + } + + def "Grype config is found at .grype/config.yaml" () { + given: + explicitlyMockPipelineStep('resource') + getPipelineMock("sh")([script: "/bin/bash ./transform-results.sh image1_repo-grype-scan-results.json .grype/config.yaml", returnStdout:true]) >> "test.txt " + getPipelineMock("sh")([script: "/bin/bash ./transform-results.sh image2_repo-grype-scan-results.json .grype/config.yaml", returnStdout: true ]) >> "test.txt " + getPipelineMock("sh")([script: "/bin/bash ./transform-results.sh qwerty-grype-scan-results.json .grype/config.yaml", returnStdout: true ]) >> "test.txt " + when: + ContainerImageScan() + then: + 1 * getPipelineMock("fileExists")(".grype/config.yaml") >> 
true + 1 * getPipelineMock("echo")("Found .grype/config.yaml") + then: + (1.._) * getPipelineMock("sh")({it =~ /^grype .* --config .grype\/config.yaml >> .*/}) + } + + def "Grype config is found at user Home path/.grype.yaml" () { + given: + explicitlyMockPipelineStep('resource') + getPipelineMock("sh")([script: "/bin/bash ./transform-results.sh image1_repo-grype-scan-results.json /home/.grype.yaml", returnStdout:true]) >> "test.txt " + getPipelineMock("sh")([script: "/bin/bash ./transform-results.sh image2_repo-grype-scan-results.json /home/.grype.yaml", returnStdout: true ]) >> "test.txt " + getPipelineMock("sh")([script: "/bin/bash ./transform-results.sh qwerty-grype-scan-results.json /home/.grype.yaml", returnStdout: true ]) >> "test.txt " + when: + ContainerImageScan() + then: + 1 * getPipelineMock("fileExists")("/home/.grype.yaml") >> true + 1 * getPipelineMock("echo")("Found ~/.grype.yaml") + then: + (1.._) * getPipelineMock("sh")({it =~ /^grype .* --config \/home\/.grype.yaml >> .*/}) + } + + def "Grype config found at /grype/config.yaml" () { + given: + explicitlyMockPipelineStep('resource') + getPipelineMock("sh")([script: "/bin/bash ./transform-results.sh image1_repo-grype-scan-results.json /xdg/grype/config.yaml", returnStdout:true]) >> "test.txt " + getPipelineMock("sh")([script: "/bin/bash ./transform-results.sh image2_repo-grype-scan-results.json /xdg/grype/config.yaml", returnStdout: true ]) >> "test.txt " + getPipelineMock("sh")([script: "/bin/bash ./transform-results.sh qwerty-grype-scan-results.json /xdg/grype/config.yaml", returnStdout: true ]) >> "test.txt " + when: + ContainerImageScan() + then: + 1 * getPipelineMock("fileExists")("/xdg/grype/config.yaml") >> true + 1 * getPipelineMock("echo")("Found /grype/config.yaml") + then: + (1.._) * getPipelineMock("sh")({it =~ /^grype .* --config \/xdg\/grype\/config.yaml >> .*/}) + } + + def "Check each image is scanned as expected when no extra config is present" () { + when: + ContainerImageScan() + 
then: + 1 * getPipelineMock("sh")("grype test_registry/image1_repo:4321dcba -o json --fail-on high >> image1_repo-grype-scan-results.json") + 1 * getPipelineMock("sh")("grype test_registry/image2_repo:4321dcbb -o json --fail-on high >> image2_repo-grype-scan-results.json") + 1 * getPipelineMock("sh")("grype test_registry/image3_repo/qwerty:4321dcbc -o json --fail-on high >> qwerty-grype-scan-results.json") + } + + def "Test json format and negligible severity" () { + given: + ContainerImageScan.getBinding().setVariable("config", [report_format: "json", fail_on_severity: "negligible"]) + when: + ContainerImageScan() + then: + (1.._) * getPipelineMock("sh")({it =~ /^grype .* -o json --fail-on negligible >> .*/}) + } + + def "Test table format and low severity" () { + given: + ContainerImageScan.getBinding().setVariable("config", [report_format: "table", fail_on_severity: "low"]) + when: + ContainerImageScan() + then: + (1.._ ) * getPipelineMock("sh")({it =~ /^grype .* -o table --fail-on low >> .*/}) + } + + def "Test cyclonedx format and medium severity" () { + given: + ContainerImageScan.getBinding().setVariable("config", [report_format: "cyclonedx", fail_on_severity: "medium"]) + when: + ContainerImageScan() + then: + (1.._) * getPipelineMock("sh")({it =~ /^grype .* -o cyclonedx --fail-on medium >> .*/}) + } + + def "Test table format and high severity" () { + given: + ContainerImageScan.getBinding().setVariable("config", [report_format: "table", fail_on_severity: "high"]) + when: + ContainerImageScan() + then: + (1.._) * getPipelineMock("sh")({it =~ /^grype .* -o table --fail-on high >> .*/}) + } + + def "Test cyclonedx format and critical severity" () { + given: + ContainerImageScan.getBinding().setVariable("config", [report_format: "cyclonedx", fail_on_severity: "critical"]) + when: + ContainerImageScan() + then: + (1.._) * getPipelineMock("sh")({it =~ /^grype .* -o cyclonedx --fail-on critical >> .*/}) + } + + def "Test Archive artifacts works as expected for 
json format and not null grype config" () { + given: + ContainerImageScan.getBinding().setVariable("config", [report_format: "json", grype_config: ".grype.yaml"]) + explicitlyMockPipelineStep("resource") + getPipelineMock("sh")([script:"/bin/bash ./transform-results.sh image1_repo-grype-scan-results.json .grype.yaml", returnStdout:true]) >> "test.txt " + getPipelineMock("sh")([script: "/bin/bash ./transform-results.sh image2_repo-grype-scan-results.json .grype.yaml", returnStdout: true ]) >> "test.txt " + getPipelineMock("sh")([script: "/bin/bash ./transform-results.sh qwerty-grype-scan-results.json .grype.yaml", returnStdout: true ]) >> "test.txt " + when: + ContainerImageScan() + then: + 1 * getPipelineMock("archiveArtifacts.call")([artifacts: "image1_repo-grype-scan-results.json, image1_repo-grype-scan-results.txt", allowEmptyArchive: true ]) + 1 * getPipelineMock("archiveArtifacts.call")([artifacts:"image2_repo-grype-scan-results.json, image2_repo-grype-scan-results.txt", allowEmptyArchive:true]) + 1 * getPipelineMock("archiveArtifacts.call")([artifacts:"qwerty-grype-scan-results.json, qwerty-grype-scan-results.txt", allowEmptyArchive:true]) + + } + + def "Test that error handling works as expected" () { + given: + explicitlyMockPipelineStep("Exception")//("Failed: java.lang.Exception: test") + getPipelineMock("sh")("grype test_registry/image1_repo:4321dcba -o json --fail-on high >> image1_repo-grype-scan-results.json") >> {throw new Exception("test")} + when: + ContainerImageScan() + then: + 1 * getPipelineMock("echo")("Failed: java.lang.Exception: test") + 1 * getPipelineMock("echo")("Grype Quality Gate Failed. 
There are one or more CVE's that exceed the maximum allowed severity rating!") + 1 * getPipelineMock("stash")("workspace") + 1 * getPipelineMock("error")(_) + } +} + + + diff --git a/libraries/kubernetes/README.md b/libraries/kubernetes/README.md index abb2ac1c..739af5e1 100644 --- a/libraries/kubernetes/README.md +++ b/libraries/kubernetes/README.md @@ -146,8 +146,8 @@ which is when the most recent code change was a **merge** into the given code br The image would be expected to be built from an earlier commit, or while there was an open PR. You can override this default for the entire pipeline by setting the `promote_previous_image` config setting to **false**. -You can also choose whether to promote images for each application environment individually through the `promote_previous_image` application_environment setting. -This application_environment setting takes priority over the config setting. +You can also choose whether to promote images for each application environment individually through the `promote_previous_image` `application_environment` setting. +This `application_environment` setting takes priority over the config setting. An example of these settings' usage: diff --git a/libraries/maven/README.md b/libraries/maven/README.md index c7760174..0fa36d3a 100644 --- a/libraries/maven/README.md +++ b/libraries/maven/README.md @@ -51,3 +51,46 @@ libraries { * The `sdp` library * Access to an appropriate Maven build agent container via the repository defined in your `sdp` library configuration + +## Migrating to 4.0 + +SDP `4.0` reworked this library to use dynamic step aliasing. + +The Maven tool configuration within Jenkins is no longer required to use this library. 
+ +To recreate the previous `maven.run()` functionality of prior versions, the below minimal pipeline configuration and template can be used: + +### Sample Pipeline Configuration + +=== "Post-4.0" + ``` groovy title="pipeline_config.groovy" + libraries { + maven { + build { + stageName = "Maven Build" + buildContainer = 'mvn' + phases = ['clean', 'install'] + options = ['-P integration-test'] + } + } + } + ``` +=== "Pre-4.0" + ``` groovy title="pipeline_config.groovy" + libraries { + maven { + mavenId = "maven" + } + } + ``` + +### Sample Pipeline Template + +=== "Post-4.0" + ``` groovy title="Jenkinsfile" + build() + ``` +=== "Pre-4.0" + ``` groovy title="Jenkinsfile" + maven.run(["clean", "install"], profiles: ["integration-test"]) + ``` diff --git a/libraries/npm/README.md b/libraries/npm/README.md index 73b4d89c..02365e57 100644 --- a/libraries/npm/README.md +++ b/libraries/npm/README.md @@ -32,16 +32,17 @@ libraries { --- -| Field | Description | Default | -| ----------------------------- | ------------------------------------------------------------------------------------------------------------------------------------- | ------- | -| `node_version` | Node version to run NPM within (installed via NVM) | `lts/*` | -| `.stageName` | stage name displayed in the Jenkins dashboard | N/A | -| `.script` | NPM script ran by the step | N/A | +| Field | Description | Default | +| ----------------------------- | ------------------------------------------------------------------------------------------------------------------------------------- | --------- | +| `nvm_container` | The container image to use | nvm:1.0.0 | +| `node_version` | Node version to run NPM within (installed via NVM) | `lts/*` | +| `.stageName` | stage name displayed in the Jenkins dashboard | N/A | +| `.script` | NPM script ran by the step | N/A | | `.artifacts` | array of glob patterns for artifacts that should be archived | -| `.npmInstall` | NPM install command to run; npm install can be 
skipped with value "skip" | `ci` | -| `.env` | environment variables to make available to the NPM process; can include key/value pairs and secrets | `[]` | -| `.env.secrets` | text or username/password credentials to make available to the NPM process; must be present and available in Jenkins credential store | `[]` | -| `.useEslintPlugin` | if the Jenkins ESLint Plugin is installed, will run the `recordIssues` step to send lint results to the plugin dashboard | `false` | +| `.npmInstall` | NPM install command to run; npm install can be skipped with value "skip" | `ci` | +| `.env` | environment variables to make available to the NPM process; can include key/value pairs and secrets | `[]` | +| `.env.secrets` | text or username/password credentials to make available to the NPM process; must be present and available in Jenkins credential store | `[]` | +| `.useEslintPlugin` | if the Jenkins ESLint Plugin is installed, will run the `recordIssues` step to send lint results to the plugin dashboard | `false` | ### Full Configuration Example @@ -206,7 +207,7 @@ libraries { 4. more secrets as needed This example shows the prod Application Environment overriding configs set in the library config. -`source_build.npm_install` is preserved as set in library config, since it isn't overridden by the Application Environment. +`source_build.npmInstall` is preserved as set in library config, since it isn't overridden by the Application Environment. ### Minimal Configuration Example @@ -234,3 +235,24 @@ It's just a key, used to supersede library config with Application Environment c ## Dependencies * The [SDP library](../sdp/) must be loaded inside the `pipeline_config.groovy` file. + +## Migrating from SDP 3.2 to 4.0 + +SDP `4.0` reworked this library to use dynamic step aliasing. 
+ +To recreate the previous `source_build()` and `unit_test()` functionality of version `3.2`, the below minimal pipeline configuration can be used: + +``` groovy title="pipeline_configuration.groovy" +libraries { + npm { + source_build { + stageName = "NPM Source Build" + script = "build" + } + unit_test { + stageName = "NPM Unit Tests" + script = "test" + } + } +} +``` diff --git a/libraries/npm/steps/npm_invoke.groovy b/libraries/npm/steps/npm_invoke.groovy index 3f85e178..f79a7458 100644 --- a/libraries/npm/steps/npm_invoke.groovy +++ b/libraries/npm/steps/npm_invoke.groovy @@ -11,6 +11,8 @@ void call(app_env = [:]) { LinkedHashMap libStepConfig = config?."${stepContext.name}" ?: [:] LinkedHashMap appStepConfig = app_env?.npm?."${stepContext.name}" ?: [:] + String nvmContainer = config?.nvm_container ?: "nvm:1.0.0" + String stageName = appStepConfig?.stageName ?: libStepConfig?.stageName ?: null @@ -32,7 +34,7 @@ void call(app_env = [:]) { // run npm command in nvm container withCredentials(creds) { - inside_sdp_image "nvm:1.0.0", { + inside_sdp_image(nvmContainer) { unstash "workspace" // verify package.json script block has command to run diff --git a/libraries/openshift/README.md b/libraries/openshift/README.md index c071fdda..7f5444bf 100644 --- a/libraries/openshift/README.md +++ b/libraries/openshift/README.md @@ -12,16 +12,16 @@ This library allows you to perform deployments to static or ephemeral applicatio --- -| Step | Description | -| ----------- | ----------- | -| ``deploy_to()`` | Performs a deployment using Helm | +| Step | Description | +|-----------------------------------------------------|-----------------------------------------------------------| +| ``deploy_to()`` | Performs a deployment using Helm | | ``ephemeral(Closure body, ApplicationEnvironment)`` | Creates a short-lived application environment for testing | ## Overview --- -![OpenShift deploy_to diagram](../../assets/images/openshift/Openshift_deploy_to_diagram.png) +![OpenShift 
deployment diagram](../../assets/images/openshift/Openshift_deploy_to_diagram.png) ## Configuration @@ -163,8 +163,8 @@ which is when the most recent code change was a **merge** into the given code br The image would be expected to be built from an earlier commit, or while there was an open PR. You can override this default for the entire pipeline by setting the `promote_previous_image` config setting to **false**. -You can also choose whether to promote images for each application environment individually through the `promote_previous_image` application_environment setting. -This application_environment setting takes priority over the config setting. +You can also choose whether to promote images for each application environment individually through the `promote_previous_image` `application_environment` setting. +This `application_environment` setting takes priority over the config setting. An example of these settings' usage: @@ -197,18 +197,18 @@ libraries{ OpenShift Library Configuration Options -| Field | Description | Default Value | Defined On | Required | -| ----------- | ----------- | ----------- | ----------- | ----------- | -| `openshift_url` | The OpenShift Console address when specified per application environment | | `[app_env]` | if `url` isn't defined | -| `url` | The OpenShift Console address when specified globally | | library spec | if `openshift_url` isn't defined | -| `helm_configuration_repository` | The GitHub Repository containing the helm chart(s) for this application | | both | Yes | -| `helm_configuration_repository_credential` | The Jenkins credential ID to access the helm configuration GitHub repository | | both | Yes | -| `tiller_namespace` | The tiller namespace for this application | | both | Yes | -| `tiller_credential` | The Jenkins credential ID referencing an OpenShift credential | | both | Yes | -| `tiller_release_name` | The name of the release to deploy | | application environment | if `[app_env].short_name` isn't defined | -| 
`chart_values_file` | The values file to use for the release | | `[app_env]` | if `[app_env].short_name` isn't defined | -| `helm_chart_branch` | The branch of helm_configuration_repository to use | `master` | `[app_env]` | No | -| `promote_previous_image` | Whether to promote a previously built image | (Boolean) `true` | both | No | +| Field | Description | Default Value | Defined On | Required | +|--------------------------------------------|------------------------------------------------------------------------------|------------------|-------------------------|-----------------------------------------| +| `openshift_url` | The OpenShift Console address when specified per application environment | | `[app_env]` | if `url` isn't defined | +| `url` | The OpenShift Console address when specified globally | | library spec | if `openshift_url` isn't defined | +| `helm_configuration_repository` | The GitHub Repository containing the helm chart(s) for this application | | both | Yes | +| `helm_configuration_repository_credential` | The Jenkins credential ID to access the helm configuration GitHub repository | | both | Yes | +| `tiller_namespace` | The tiller namespace for this application | | both | Yes | +| `tiller_credential` | The Jenkins credential ID referencing an OpenShift credential | | both | Yes | +| `tiller_release_name` | The name of the release to deploy | | application environment | if `[app_env].short_name` isn't defined | +| `chart_values_file` | The values file to use for the release | | `[app_env]` | if `[app_env].short_name` isn't defined | +| `helm_chart_branch` | The branch of helm_configuration_repository to use | `master` | `[app_env]` | No | +| `promote_previous_image` | Whether to promote a previously built image | (Boolean) `true` | both | No | ```groovy application_environments{ diff --git a/libraries/owasp_dep_check/README.md b/libraries/owasp_dep_check/README.md index 095e5167..22cfab5d 100644 --- a/libraries/owasp_dep_check/README.md +++ 
b/libraries/owasp_dep_check/README.md @@ -13,8 +13,8 @@ The OWASP Dependency Check library will use the namesake tool to scan a project' --- -| Step | Description | -| ----------- | ----------- | +| Step | Description | +| ------------------------------- | ----------------------------------------------------------------------------- | | `application_dependency_scan()` | Uses the OWASP Dependency Check CLI to perform an application dependency scan | ## Configuration @@ -23,19 +23,21 @@ The OWASP Dependency Check library will use the namesake tool to scan a project' OWASP Dependency Check Library Configuration Options -| Field | Description | Default Value | -| ----------- | ----------- | ----------- | -| `scan` | ArrayList of Ant style paths to scan | `[ '.' ]` | -| `exclude` | ArrayList of Ant style paths to exclude | `[ ]` | -| `cvss_threshold` | A number between 0 and 10, inclusive, representing the failure threshold for vulnerabilities (**note:** will never fail unless a threshold is provided) | | -| `image_tag` | The tag for the scanner docker image used | `latest` | +| Field | Description | Default Value | +| ------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------- | +| `scan` | ArrayList of Ant style paths to scan | `[ '.' 
]` | +| `exclude` | ArrayList of Ant style paths to exclude | `[ ]` | +| `cvss_threshold` | A number between 0 and 10, inclusive, representing the failure threshold for vulnerabilities (**note:** will never fail unless a threshold is provided) | | +| `allow_suppression_file` | Allows whitelisting vulnerabilities using a suppression XML file | `true` | +| `suppression_file` | Path to the suppression file (see [here](https://jeremylong.github.io/DependencyCheck/general/suppression.html) for how to create a suppression file) | `dependency-check-suppression.xml` | +| `image_tag` | The tag for the scanner Docker image used | `latest` | ## Example Configuration Snippet --- ```groovy -libraries{ +libraries { owasp_dep_check { scan = [ "src" ] cvss_threshold = 9 @@ -54,7 +56,7 @@ The `application_dependency_scan` step archives artifacts in multiple formats: H --- From the [Wikipedia article](https://en.wikipedia.org/wiki/Common_Vulnerability_Scoring_System), ->The Common Vulnerability Scoring System (CVSS) is a free and open industry standard for assessing the severity of computer system security vulnerabilities [...] Scores range from 0 to 10, with 10 being the most severe +> The Common Vulnerability Scoring System (CVSS) is a free and open industry standard for assessing the severity of computer system security vulnerabilities. Scores range from 0 to 10, with 10 being the most severe. The pipeline can fail if a vulnerability is detected at or above a given threshold. This threshold is set with the `cvss_threshold` configuration option. 
diff --git a/libraries/owasp_dep_check/steps/application_dependency_scan.groovy b/libraries/owasp_dep_check/steps/application_dependency_scan.groovy index ff25d3fb..5e319f64 100644 --- a/libraries/owasp_dep_check/steps/application_dependency_scan.groovy +++ b/libraries/owasp_dep_check/steps/application_dependency_scan.groovy @@ -6,33 +6,49 @@ package libraries.owasp_dep_check.steps void call() { - stage('Application Dependency Scan: OWASP Dep Checker'){ + stage('Application Dependency Scan: OWASP Dep Checker') { String resultsDir = "owasp-dependency-check" - String args = "--out ${resultsDir} --enableExperimental --format ALL" + String args = "--out ${resultsDir} --enableExperimental --format ALL" - ArrayList scan = config.scan ?: [ '.' ] + ArrayList scan = config?.scan ?: [ '.' ] scan.each{ s -> args += " -s ${s}" } - ArrayList exclude = config.exclude ?: [] + ArrayList exclude = config?.exclude ?: [] exclude.each{ e -> args += " --exclude ${e}" } // vulnerabilities greater than this will fail the build // max value 10 - if(config.containsKey("cvss_threshold")){ - Double threshold = config.cvss_threshold - if(threshold <= 10.0){ + if (config?.containsKey("cvss_threshold")) { + Double threshold = config?.cvss_threshold + if (threshold <= 10.0) { args += " --failOnCVSS ${threshold} --junitFailOnCVSS ${threshold}" } } - String image_tag = config.image_tag ?: "latest" + String image_tag = config?.image_tag ?: "latest" inside_sdp_image "owasp-dep-check:$image_tag", { unstash "workspace" + + // suppress whitelisted vulnerabilities + Boolean allowSuppressionFile = config?.allow_suppression_file ?: true + if (allowSuppressionFile) { + String suppressionFile = config?.suppression_file ?: "dependency-check-suppression.xml" + Boolean suppressionFileExists = fileExists suppressionFile + + if (suppressionFileExists) { + args += " --suppression ${suppressionFile}" + } + else { + echo "\"${suppressionFile}\" does not exist. Skipping suppression." 
+ } + } + + // perform the scan try { sh "mkdir -p ${resultsDir} && mkdir -p owasp-data && /usr/share/dependency-check/bin/dependency-check.sh ${args} -d owasp-data" - }catch (ex) { + } catch (ex) { error "Error occured when running OWASP Dependency Check: ${ex.getMessage()}" - }finally { + } finally { archiveArtifacts allowEmptyArchive: true, artifacts: "${resultsDir}/" junit allowEmptyResults: true, healthScaleFactor: 0.0, testResults: "${resultsDir}/dependency-check-junit.xml" } diff --git a/libraries/owasp_dep_check/test/ApplicationDependencySpec.groovy b/libraries/owasp_dep_check/test/ApplicationDependencySpec.groovy new file mode 100644 index 00000000..6186b2ea --- /dev/null +++ b/libraries/owasp_dep_check/test/ApplicationDependencySpec.groovy @@ -0,0 +1,63 @@ +/* + Copyright © 2018 Booz Allen Hamilton. All Rights Reserved. + This software package is licensed under the Booz Allen Public License. The license can be found in the License file or at http://boozallen.github.io/licenses/bapl +*/ + +package libraries.owasp_dep_check + +public class ApplicationDependencyScanSpec extends JTEPipelineSpecification { + def ApplicationDependencyScan = null + + String fileDoesNotExistWarning = "\"dependency-check-suppression.xml\" does not exist. Skipping suppression." + + String commandBeginning = "mkdir -p owasp-dependency-check && mkdir -p owasp-data && /usr/share/dependency-check/bin/dependency-check.sh" + String defaultArgs = "--out owasp-dependency-check --enableExperimental --format ALL -s ." 
+ String expectedAdditionalArgs = "" + String commandEnd = "-d owasp-data" + + def setup() { + ApplicationDependencyScan = loadPipelineScriptForStep("owasp_dep_check", "application_dependency_scan") + + ApplicationDependencyScan.getBinding().setVariable("config", [:]) + + explicitlyMockPipelineStep("inside_sdp_image") + } + + def "Does not print warning message if the suppression file is found" () { + setup: + getPipelineMock("fileExists")(_) >> { return true } + when: + ApplicationDependencyScan() + then: + 0 * getPipelineMock("echo")(fileDoesNotExistWarning) + } + + def "Prints warning message if the suppression file is not found" () { + setup: + getPipelineMock("fileExists")(_) >> { return false } + when: + ApplicationDependencyScan() + then: + 1 * getPipelineMock("echo")(fileDoesNotExistWarning) + } + + def "Uses --suppression flag when using suppression file" () { + setup: + getPipelineMock("fileExists")(_) >> { return true } + expectedAdditionalArgs = " --suppression dependency-check-suppression.xml" + when: + ApplicationDependencyScan() + then: + 1 * getPipelineMock("sh")("${commandBeginning} ${defaultArgs}${expectedAdditionalArgs} ${commandEnd}") + } + + def "Does not use --suppression flag when not using suppression file" () { + setup: + getPipelineMock("fileExists")(_) >> { return false } + expectedAdditionalArgs = "" + when: + ApplicationDependencyScan() + then: + 1 * getPipelineMock("sh")("${commandBeginning} ${defaultArgs}${expectedAdditionalArgs} ${commandEnd}") + } +} diff --git a/libraries/sonarqube/steps/dotnet_scanner_analysis.groovy b/libraries/sonarqube/steps/dotnet_scanner_analysis.groovy new file mode 100644 index 00000000..c1dc7017 --- /dev/null +++ b/libraries/sonarqube/steps/dotnet_scanner_analysis.groovy @@ -0,0 +1,97 @@ +/* + Copyright © 2018 Booz Allen Hamilton. All Rights Reserved. + This software package is licensed under the Booz Allen Public License. 
The license can be found in the License file or at http://boozallen.github.io/licenses/bapl +*/ + +package libraries.sonarqube.steps +import jenkins.model.Jenkins +import com.cloudbees.plugins.credentials.Credentials +import com.cloudbees.plugins.credentials.CredentialsProvider +import com.cloudbees.plugins.credentials.impl.UsernamePasswordCredentialsImpl +import org.jenkinsci.plugins.plaincredentials.impl.StringCredentialsImpl +import hudson.plugins.sonar.SonarGlobalConfiguration + +void call() { + + // default values for config options + LinkedHashMap defaults = [ + credential_id: "sonarqube-token", + unity_app: false, + image: "dotnet-sonar-scanner:5.2.2-1.1", + installation_name: "SonarQube", + stage_display_name: "SonarQube Dotnet Analysis", + unstash: [ "workspace" ], + scanner_begin_parameters: [], + dotnet_build_parameters: [], + scanner_end_parameters: [] + ] + + // name of installation to use, as configured in Manage Jenkins > Configure System > SonarQube Installations + String installation_name = config.installation_name ?: defaults.installation_name + + // whether or not this is a unity build + Boolean unity_app = defaults.unity_app + if(config.containsKey("unity_app")){ + unity_app = config.unity_app + } + + // credential ID for SonarQube Auth + String cred_id = config.credential_id ?: defaults.credential_id + + //sonar project key + String sonar_project_key = config.sonar_project_key ?: ''; + // dotnet sonarscanner does not use properties file. Try to get project key from env + if(sonar_project_key.isEmpty()){ + if ((env.ORG_NAME ?: '').isEmpty()){ + sonar_project_key = "${env.REPO_NAME}" + } else { + sonar_project_key = "${env.ORG_NAME}:${env.REPO_NAME}" + } + } + + // purely aesthetic. the name of the "Stage" for this task. 
+ String stage_display_name = config.stage_display_name ?: defaults.stage_display_name + + // sets image to use + String image = config.image ?: defaults.image + + ArrayList unstashList = config.unstash ?: defaults.unstash + + // if a unity project, build the unity solution + if (unity_app) + build_unity() + + stage(stage_display_name) { + inside_sdp_image image, { + withCredentials([string(credentialsId: cred_id, variable: 'sq_token')]) { + withSonarQubeEnv(installation_name){ + + // fetch the source code + unstash "workspace" + + // build commands to execute + // start with base command... + ArrayList scanner_begin_command = [ "dotnet-sonarscanner begin" ] + ArrayList dotnet_build_command = [ "dotnet build" ] + ArrayList scanner_end_command = [ "dotnet-sonarscanner end" ] + + scanner_begin_command << "/k:'${sonar_project_key}' /d:sonar.login='${env.sq_token}' /d:sonar.host.url='${SONAR_HOST_URL}'" + scanner_end_command << "/d:sonar.login='${env.sq_token}'" + + // then join user provided params + scanner_begin_command << (config.scanner_begin_parameters ?: defaults.scanner_begin_parameters) + dotnet_build_command << (config.dotnet_build_parameters ?: defaults.dotnet_build_parameters) + scanner_end_command << (config.scanner_end_parameters ?: defaults.scanner_end_parameters) + + // begin dotnet sonar scan + sh scanner_begin_command.flatten().join(" ") + + // run dotnet build on sln + sh dotnet_build_command.flatten().join(" ") + // end dotnet sonar scan, send results to sonar server + sh scanner_end_command.flatten().join(" ") + } + } + } + } +} diff --git a/libraries/sonarqube/steps/scanner_analysis.groovy b/libraries/sonarqube/steps/scanner_analysis.groovy new file mode 100644 index 00000000..20b04b4e --- /dev/null +++ b/libraries/sonarqube/steps/scanner_analysis.groovy @@ -0,0 +1,160 @@ +/* + Copyright © 2018 Booz Allen Hamilton. All Rights Reserved. + This software package is licensed under the Booz Allen Public License. 
The license can be found in the License file or at http://boozallen.github.io/licenses/bapl +*/ + +package libraries.sonarqube.steps + +import jenkins.model.Jenkins +import com.cloudbees.plugins.credentials.Credentials +import com.cloudbees.plugins.credentials.CredentialsProvider +import com.cloudbees.plugins.credentials.impl.UsernamePasswordCredentialsImpl +import org.jenkinsci.plugins.plaincredentials.impl.StringCredentialsImpl +import hudson.plugins.sonar.SonarGlobalConfiguration + +def call(){ + + // default values for config options + LinkedHashMap defaults = [ + credential_id: "sonarqube", + wait_for_quality_gate: true, + enforce_quality_gate: true, + installation_name: "SonarQube", + timeout_duration: 1, + timeout_unit: "HOURS", + stage_display_name: "SonarQube Analysis", + unstash: [ "test-results" ], + cli_parameters: [] + ] + + // whether or not to wait for the quality gate + Boolean wait = defaults.wait_for_quality_gate + if(config.containsKey("wait_for_quality_gate")){ + wait = config.wait_for_quality_gate + } + // whether or not to enforce the SQ QG + Boolean enforce = defaults.enforce_quality_gate + if(config.containsKey("enforce_quality_gate")){ + enforce = config.enforce_quality_gate + } + + // name of installation to use, as configured in Manage Jenkins > Configure System > SonarQube Installations + String installation_name = config.installation_name ?: defaults.installation_name + validateInstallationExists(installation_name) + + // credential ID for SonarQube Auth + String cred_id = config.credential_id ?: fetchCredentialFromInstallation(installation_name) ?: defaults.credential_id + + // purely aesthetic. the name of the "Stage" for this task. 
+ String stage_display_name = config.stage_display_name ?: defaults.stage_display_name + + // timeout settings + def timeout_duration = config.timeout_duration ?: defaults.timeout_duration + String timeout_unit = config.timeout_unit ?: defaults.timeout_unit + + ArrayList unstashList = config.unstash ?: defaults.unstash + + stage(stage_display_name){ + inside_sdp_image "sonar-scanner", { + withCredentials(determineCredentialType(cred_id)) { + withSonarQubeEnv(installation_name){ + // fetch the source code + unstash "workspace" + + /* + checks for the existence of a stash called "test-results" + which may have been created by previous steps to store results + that sonarqube will consume + */ + unstashList.each{ -> + try{ unstash it }catch(ex){} + } + + /* + creates an empty directory in the event that a value for + sonar.java.binaries needs to be provided when the binaries + are not present during sonarqube analysis + */ + sh "mkdir -p empty" + + // build out the command to execute + ArrayList command = [ "sonar-scanner -X" ] + + /* + if an API token was used, only provide -Dsonar.login + if a username/password was used, provide both -Dsonar.login and -Dsonar.password + + because of how determineCredentialType() works - the env var sq_user will + only be present if a username/password was provided. 
+ */ + if(env.sq_user){ + command << "-Dsonar.login='${env.sq_user}' -Dsonar.password='${env.sq_token}'" + } else { + command << "-Dsonar.login='${env.sq_token}'" + } + + // join user provided params + command << (config.cli_parameters ?: defaults.cli_parameters) + + sh command.flatten().join(" ") + + } + + if(wait){ + timeout(time: timeout_duration, unit: timeout_unit) { + def qg = waitForQualityGate() + if (qg.status != 'OK' && enforce) { + error "Pipeline aborted due to quality gate failure: ${qg.status}" + } + } + } + } + } + } +} + +def determineCredentialType(String cred_id){ + def allCreds = CredentialsProvider.lookupCredentials(Credentials, Jenkins.get(),null, null) + def cred = allCreds.find{ it.id.equals(cred_id) } + + if(cred == null){ + error "SonarQube: Credential with id '${cred_id}' does not exist." + } + + if(!(cred.getClass() in [UsernamePasswordCredentialsImpl, StringCredentialsImpl])){ + error """ + SonarQube: Credential with id '${cred_id}' must be either: + 1. a valid username/password for SonarQube + 2. a secret text credential storing an API Token. + Found credential type: ${cred.getClass()} + """.trim().stripIndent(8) + } + + if(cred instanceof UsernamePasswordCredentialsImpl){ + return [ usernamePassword(credentialsId: cred_id, passwordVariable: 'sq_token', usernameVariable: 'sq_user') ] + } + + if(cred instanceof StringCredentialsImpl){ + return [ string(credentialsId: cred_id, variable: 'sq_token') ] + } +} + +void validateInstallationExists(installation_name){ + boolean exists = SonarGlobalConfiguration.get().getInstallations().find{ + it.getName() == installation_name + } as boolean + if(!exists){ + error "SonarQube: installation '${installation_name}' does not exist" + } +} + +/* + when not set - this returns an empty string, "" + which evaluates to false when used in an elvis operator. 
+*/ +String fetchCredentialFromInstallation(installation_name){ + String id = SonarGlobalConfiguration.get().getInstallations().find{ + it.getName() == installation_name + }.getCredentialsId() + return id +} \ No newline at end of file diff --git a/libraries/sonarqube/test/DotnetScannerAnalysisSpec.groovy b/libraries/sonarqube/test/DotnetScannerAnalysisSpec.groovy new file mode 100644 index 00000000..7329cbb7 --- /dev/null +++ b/libraries/sonarqube/test/DotnetScannerAnalysisSpec.groovy @@ -0,0 +1,55 @@ +/* + Copyright © 2018 Booz Allen Hamilton. All Rights Reserved. + This software package is licensed under the Booz Allen Public License. The license can be found in the License file or at http://boozallen.github.io/licenses/bapl +*/ + +package libraries.dotnet + +public class DotnetScannerAnalysisSpec extends JTEPipelineSpecification { + def DotnetScannerAnalysis = null + + LinkedHashMap minimalSourceBuildConfig = [ + source_build: [ + stepName: "dotnet_scanner_analysis", + outDir: "OutTest" + ] + ] + LinkedHashMap minimalUnitTestConfig = [ + unit_test: [ + stepName: "unit_test", + resultDir: "test" + ] + ] + + def setup() { + explicitlyMockPipelineStep("inside_sdp_image") + explicitlyMockPipelineStep("login_to_registry") + explicitlyMockPipelineStep("dotnet_scanner_analysis") + explicitlyMockPipelineVariable("workspace") + explicitlyMockPipelineVariable("SONAR_HOST_URL") + + DotnetScannerAnalysis = loadPipelineScriptForStep("sonarqube", "dotnet_scanner_analysis") + } + + def "Unit tests run successfully" () { + setup: + DotnetScannerAnalysis.getBinding().setVariable("config", [unit_test: [resultDir: "test"]]) + when: + DotnetScannerAnalysis() + then: + noExceptionThrown() + 1 * getPipelineMock("inside_sdp_image").toString() + 1 * getPipelineMock("stage").toString() + 1 * getPipelineMock("withSonarQubeEnv.call").toString() + 1 * getPipelineMock("withCredentials").toString() + 1 * getPipelineMock("string.call").toString() + 1 * getPipelineMock("unstash").toString() + 
1 * getPipelineMock("env.getProperty").call('REPO_NAME') + 1 * getPipelineMock("string.call").call(['credentialsId':'sonarqube-token', 'variable':'sq_token']) + 2 * getPipelineMock("env.getProperty").call('sq_token') + 1 * getPipelineMock("unstash").call('workspace') + 1 * getPipelineMock("env.getProperty").call('ORG_NAME') + + } + +} diff --git a/libraries/sonarqube/test/StaticCodeAnalysisSpec.groovy b/libraries/sonarqube/test/StaticCodeAnalysisSpec.groovy new file mode 100644 index 00000000..130b52de --- /dev/null +++ b/libraries/sonarqube/test/StaticCodeAnalysisSpec.groovy @@ -0,0 +1,62 @@ +/* + Copyright © 2018 Booz Allen Hamilton. All Rights Reserved. + This software package is licensed under the Booz Allen Public License. The license can be found in the License file or at http://boozallen.github.io/licenses/bapl +*/ + +package libraries.sonarqube + +public class StaticCodeAnalysisSpec extends JTEPipelineSpecification { + def StaticCodeAnalysis = null + + public static class DummyException extends RuntimeException { + public DummyException(String _message) { super (_message); } + } + + LinkedHashMap minimalUnitTestConfig = [ + unit_test: [ + stepName: "unit_test", + resultDir: "test" + ] + ] + + + def setup() { + explicitlyMockPipelineStep("dotnet_scanner_analysis") + StaticCodeAnalysis = loadPipelineScriptForStep("sonarqube", "static_code_analysis") + explicitlyMockPipelineVariable("out") + } + + + + def "Pipeline Fails When Config Is Undefined" () { + setup: + explicitlyMockPipelineStep("scanner_analysis") + StaticCodeAnalysis.getBinding().setVariable("config", null) + when: + StaticCodeAnalysis() // Run the pipeline step we loaded, with no parameters + then: + 1 * getPipelineMock("scanner_analysis").call() + 1 * getPipelineMock("scanner_analysis").toString() + } + + def "Pipeline has an error caught in try catch block" () { + setup: + explicitlyMockPipelineStep("sh") + explicitlyMockPipelineStep("dotnet_scanner_analysis") + 
explicitlyMockPipelineStep("scanner_analysis") + getPipelineMock("sh")("echo 'This is for Dummy Test'") >> { throw new DummyException("This is for Dummy Test")} + when: + try { + StaticCodeAnalysis() // Run the pipeline step we loaded, with no parameters + } catch( DummyException e ) {} + then: + // 1 * getPipelineMock("dotnet_scanner_analysis")("ERROR: config is not defined") + 1 * getPipelineMock("scanner_analysis").call() + 1 * getPipelineMock("scanner_analysis").toString() + //1 * getPipelineMock("sh")( _ as Map ) + 1 * getPipelineMock("sh").toString() + + +} + +} \ No newline at end of file diff --git a/libraries/syft/README.md b/libraries/syft/README.md new file mode 100644 index 00000000..54986ac6 --- /dev/null +++ b/libraries/syft/README.md @@ -0,0 +1,36 @@ +--- +description: This library allows you to generate a Software Bill of Materials (SBOM) for each container built in your project +--- + +# Syft + +This library allows you to generate a Software Bill of Materials (SBOM) for each container built in your project using the [Syft tool](https://github.com/anchore/syft). + +## Steps + +| Step | Description | +|-------------------|--------------------------------------------------| +| `generate_sbom()` | Generates and archives SBOM files in JSON format | + +## Configuration + +| Library Configuration | Description | Type | Default Value | Options | +|-----------------------|---------------------------------------------------------------|-------------|---------------------|-----------------------------------------------------------------------------------------------------------| +| `raw_results_file` | The base name of the report file generated. Omit Extension. | String | `syft-sbom-results` | | +| `sbom_container` | Name of the container image containing the syft executable. | String | `syft:0.47.0` | | +| `sbom_format` | The valid formats a report can be generated in. 
| ArrayList | `['json']` | `['json', 'text', 'cyclonedx-xml', 'cyclonedx-json', 'spdx-tag-value', 'spdx-json', 'github', 'table']` | + +``` groovy title='pipeline_config.groovy' +libraries { + syft { + raw_results_file = "syft-scan" + sbom_container = "syft:v0.47.0" + sbom_format = ['json', 'spdx-json', 'table'] + } +} +``` + +## Dependencies + +* Base SDP library +* Docker SDP library diff --git a/libraries/syft/library_config.groovy b/libraries/syft/library_config.groovy new file mode 100644 index 00000000..f4217e1f --- /dev/null +++ b/libraries/syft/library_config.groovy @@ -0,0 +1,9 @@ +fields { + required { + } + optional { + raw_results_file = String + sbom_container = String + sbom_format = ArrayList + } +} diff --git a/libraries/syft/steps/generate_sbom.groovy b/libraries/syft/steps/generate_sbom.groovy new file mode 100644 index 00000000..fd18d940 --- /dev/null +++ b/libraries/syft/steps/generate_sbom.groovy @@ -0,0 +1,62 @@ +/* + Copyright © 2022 Booz Allen Hamilton. All Rights Reserved. + This software package is licensed under the Booz Allen Public License. 
+ The license can be found in the License file or at http://boozallen.github.io/licenses/bapl +*/ +package libraries.syft.steps + +void call() { + stage('Generate SBOM using Syft') { + //Import settings from config + String raw_results_file = config?.raw_results_file ?: 'syft-sbom-results' // leave off file extension so that it can be added based off off selected formats + String sbom_container = config?.sbom_container ?: 'syft:0.47.0' + ArrayList sbom_format = config?.sbom_format ?: ["json"] + String artifacts = "" + boolean shouldFail = false + + //Get list of images to scan (assuming same set built by Docker) + def images = get_images_to_build() + inside_sdp_image "${sbom_container}", { + login_to_registry { + unstash "workspace" + images.each { img -> + String ARGS = "-q" + String results_name = "${img.repo}-${img.tag}-${raw_results_file}".replaceAll("/","-") + sbom_format.each { format -> + String formatter = "" + if(format == "json" || format == "cyclonedx-json" || format == "spdx-json" || format == "github") { + formatter += "${results_name}-${format}.json" + } + else if(format == "text" || format == "spdx-tag-value" || format == "table") { + formatter += "${results_name}-${format}.txt" + } + else if (format == "cyclonedx-xml") { + formatter += "${results_name}-${format}.xml" + } + + ARGS += " -o ${format}=${formatter} " + artifacts += "${formatter}," + } + + // perform the syft scan + try { + sh "syft ${img.registry}/${img.repo}:${img.tag} ${ARGS}" + } + catch(Exception err) { + shouldFail = true + echo "SBOM generation Failed: ${err}" + } + finally { + if(shouldFail){ + error("SBOM Stage Failed") + } + else { + archiveArtifacts artifacts: "${artifacts.replaceAll(',$', "")}" + } + } + } + stash "workspace" + } + } + } +} diff --git a/libraries/syft/test/GenerateSBOMSpec.groovy b/libraries/syft/test/GenerateSBOMSpec.groovy new file mode 100644 index 00000000..fa9a5e4c --- /dev/null +++ b/libraries/syft/test/GenerateSBOMSpec.groovy @@ -0,0 +1,45 @@ +/* + 
Copyright © 2022 Booz Allen Hamilton. All Rights Reserved. + This software package is licensed under the Booz Allen Public License. + The license can be found in the License file or at http://boozallen.github.io/licenses/bapl +*/ + +package libraries.syft + +public class GenerateSBOMSpec extends JTEPipelineSpecification { + def GenerateSBOM = null + + def setup() { + GenerateSBOM = loadPipelineScriptForStep("syft", "generate_sbom") + + GenerateSBOM.getBinding().setVariable("config", [:]) + + explicitlyMockPipelineStep("login_to_registry") + explicitlyMockPipelineStep("inside_sdp_image") + explicitlyMockPipelineVariable("get_images_to_build") + + getPipelineMock("get_images_to_build.call")() >> { + def images = [] + images << [registry: "ghcr.io/boozallen/sdp-images", repo: "syft", context: "syft", tag: "latest"] + images << [registry: "ghcr.io/boozallen/sdp-images", repo: "grype", context: "grype", tag: "latest"] + return images + } + } + + def "Generates Software Bill of Materials file" () { + given: + GenerateSBOM.getBinding().setVariable("config", [sbom_format: ["json"]]) + when: + GenerateSBOM() + then: + 1 * getPipelineMock('sh').call('syft ghcr.io/boozallen/sdp-images/syft:latest -q -o json=syft-latest-syft-sbom-results-json.json ') + 1 * getPipelineMock('sh').call('syft ghcr.io/boozallen/sdp-images/grype:latest -q -o json=grype-latest-syft-sbom-results-json.json ') + } + + def "Archives SBOM file as expected" () { + when: + GenerateSBOM() + then: + 2 * getPipelineMock('archiveArtifacts.call')(_ as Map) + } +} diff --git a/libraries/sysdig_secure/README.md b/libraries/sysdig_secure/README.md index 77d7727f..12cb52cf 100644 --- a/libraries/sysdig_secure/README.md +++ b/libraries/sysdig_secure/README.md @@ -4,7 +4,7 @@ description: Performs container image scanning with Sysdig Secure's inline scann # Sysdig Secure -This library leverages Sysdig Secure's [inline scanning script](https://github.com/sysdiglabs/secure-inline-scan) to scan container images, +This 
library leverages a script from Sysdig Secure ([inline scanning script](https://github.com/sysdiglabs/secure-inline-scan)) to scan container images, report the information to the Sysdig Secure server, and download a PDF report of the findings. ## Steps diff --git a/libraries/yarn/README.md b/libraries/yarn/README.md new file mode 100644 index 00000000..4cb60390 --- /dev/null +++ b/libraries/yarn/README.md @@ -0,0 +1,240 @@ +--- +description: Run Yarn script commands in an NVM container with a specified Node version +--- + +# Yarn + +Run Yarn script commands in an NVM container with a specified Node version. + +## Configuration + +All configs can be set in either the library config or the Application Environment. All configs set in Application Environment take precedence. + +Environment variables and secrets set in the library config are concatenated with those set in the Application Environment. +Environment variables and secrets with the same key are set to the definition contained in the Application Environment. + +## Steps + +Steps are configured dynamically in either the library config or the Application Environment. + +``` groovy title="pipeline_configuration.groovy" +libraries { + yarn { + [step_name] { + // config fields described below + } + ... 
+ } +} +``` + +## Example Library Configuration + +--- + +| Field | Description | Default | +| ----------------------------- | -------------------------------------------------------------------------------------------------------------------------------------- | ----------------- | +| `nvm_container` | The container image to use | nvm:1.0.0 | +| `node_version` | Node version to run Yarn within (installed via NVM) | `lts/*` | +| `yarn_version` | Yarn version to use | `latest` | +| `.stageName` | stage name displayed in the Jenkins dashboard | N/A | +| `.script` | Yarn script ran by the step | N/A | +| `.artifacts` | array of glob patterns for artifacts that should be archived | +| `.yarnInstall` | Yarn install command to run; Yarn install can be skipped with value "skip" | `frozen-lockfile` | +| `.env` | environment variables to make available to the Yarn process; can include key/value pairs and secrets | `[]` | +| `.env.secrets` | text or username/password credentials to make available to the Yarn process; must be present and available in Jenkins credential store | `[]` | +| `.useEslintPlugin` | if the Jenkins ESLint Plugin is installed, will run the `recordIssues` step to send lint results to the plugin dashboard | `false` | + +### Full Configuration Example + +Each available method has config options that can be specified in the Application Environment or within the library configuration. 
+ +``` groovy title="pipeline_configuration.groovy" +application_environments { + dev + prod { + yarn { + node_version = "14.16.1" + yarn_version = "1.22.17" + unit_test { + stageName = "Yarn Unit Tests" + script = "full-test-suite" + artifacts = ["coverage/lcov.info"] + yarnInstall = "frozen-lockfile" + env { + someKey = "prodValue for tests" + // (1) + secrets{ + someTextCredential { + type = "text" + name = "VARIABLE_NAME" + id = "prod-credential-id" + } + someUsernamePasswordCredential { + type = "usernamePassword" + usernameVar = "USER" + passwordVar = "PASS" + id = "prod-credential-id" + } + // (2) + } + } + } + source_build { + stageName = "Yarn Source Build" + script = "prod-build" + env { + someKey = "prodValue for builds" + secrets { + someTextCredential { + type = "text" + name = "VARIABLE_NAME" + id = "prod-credential-id" + } + someUsernamePasswordCredential { + type = "usernamePassword" + usernameVar = "USER" + passwordVar = "PASS" + id = "prod-credential-id" + } + } + } + } + } + lint_code { + stageName = "Yarn Lint Code" + script = "lint" + artifacts = [ + "eslint-report.json", + "eslint-report.html", + "eslint-report.xml", + ] + useEslintPlugin = true + env { + someKey = "prodValue for linting" + secrets { + someTextCredential { + type = "text" + name = "VARIABLE_NAME" + id = "prod-credential-id" + } + someUsernamePasswordCredential { + type = "usernamePassword" + usernameVar = "USER" + passwordVar = "PASS" + id = "prod-credential-id" + } + } + } + } + } + } +} + +libraries { + yarn { + node_version = "lts/*" + yarn_version = "latest" + unit_test { + stageName = "Yarn Unit Tests" + script = "test" + yarnInstall = "install" + env { + someKey = "someValue for tests" + // (3) + secrets { + someTextCredential { + type = "text" + name = "VARIABLE_NAME" + id = "some-credential-id" + } + someUsernamePasswordCredential { + type = "usernamePassword" + usernameVar = "USER" + passwordVar = "PASS" + id = "some-credential-id" + } + // (4) + } + } + } + 
source_build { + stageName = "Yarn Source Build" + script = "build" + yarnInstall = "skip" + env { + someKey = "someValue for builds" + secrets { + someTextCredential { + type = "text" + name = "VARIABLE_NAME" + id = "some-credential-id" + } + someUsernamePasswordCredential { + type = "usernamePassword" + usernameVar = "USER" + passwordVar = "PASS" + id = "some-credential-id" + } + } + } + } + lint_code { + stageName = "Yarn Lint Code" + script = "lint" + yarnInstall = "skip" + env { + someKey = "someValue for linting" + secrets { + someTextCredential { + type = "text" + name = "VARIABLE_NAME" + id = "some-credential-id" + } + someUsernamePasswordCredential { + type = "usernamePassword" + usernameVar = "USER" + passwordVar = "PASS" + id = "some-credential-id" + } + } + } + } + } +} +``` + +1. more envVars as needed +2. more secrets as needed +3. more envVars as needed +4. more secrets as needed + +This example shows the prod Application Environment overriding configs set in the library config. +`source_build.yarnInstall` is preserved as set in library config, since it isn't overridden by the Application Environment. + +### Minimal Configuration Example + +The minimal configuration for this library is: + +``` groovy title="pipeline_configuration.groovy" +libraries { + yarn { + unit_test { + stageName = "Yarn Unit Tests" + script = "test" + } + } +} +``` + +### Secrets + +There are two types of secrets currently supported: secret text and username/password credentials. +These credentials must be stored in the Jenkins credential store and be available to the pipeline. + +The name of each credential block (such as `someTextCredential`) is arbitrary. +It's just a key, used to supersede library config with Application Environment configs, and when describing configuration errors found by the step. + +## Dependencies + +* The [SDP library](../sdp/) must be loaded inside the `pipeline_config.groovy` file. 
diff --git a/libraries/yarn/steps/yarn_invoke.groovy b/libraries/yarn/steps/yarn_invoke.groovy new file mode 100644 index 00000000..3f4e36eb --- /dev/null +++ b/libraries/yarn/steps/yarn_invoke.groovy @@ -0,0 +1,184 @@ +/* + Copyright © 2022 Booz Allen Hamilton. All Rights Reserved. + This software package is licensed under the Booz Allen Public License. The license can be found in the License file or at http://boozallen.github.io/licenses/bapl +*/ + +package libraries.yarn.steps + +@StepAlias(dynamic = { return config.keySet() }) +void call(app_env = [:]) { + // Get config for step + LinkedHashMap libStepConfig = config?."${stepContext.name}" ?: [:] + LinkedHashMap appStepConfig = app_env?.yarn?."${stepContext.name}" ?: [:] + + String nvmContainer = config?.nvm_container ?: "nvm:1.0.0" + + String stageName = appStepConfig?.stageName ?: + libStepConfig?.stageName ?: + null + + if (!stageName) { + error("No stage name found for step: " + stepContext.name) + } + + def artifacts = appStepConfig?.artifacts ?: + libStepConfig?.artifacts ?: + [] as String[] + + stage(stageName) { + // Gather, validate and format secrets to pull from credential store + ArrayList creds = this.formatSecrets(libStepConfig, appStepConfig) + + // Gather and set non-secret environment variables + this.setEnvVars(libStepConfig, appStepConfig, config, app_env) + + // run Yarn command in nvm container + withCredentials(creds) { + inside_sdp_image(nvmContainer) { + unstash "workspace" + + // verify package.json script block has command to run + def packageJson = readJSON(file: "package.json") + if (!packageJson?.scripts?.containsKey(env.scriptCommand)) { + error("script: '$env.scriptCommand' not found in package.json scripts") + } + + try { + if (env.yarnInstall != "skip") { + // run script command after installing dependencies + sh ''' + set +x + source ~/.bashrc + nvm install $node_version + nvm version + + npm install -g yarn@$yarn_version + + echo 'Running with Yarn install' + yarn $yarnInstall 
+ yarn $scriptCommand + ''' + } + else { + // run script command without installing dependencies + sh ''' + set +x + source ~/.bashrc + nvm install $node_version + nvm version + + npm install -g yarn@$yarn_version + + echo 'Running without Yarn install' + yarn $scriptCommand + ''' + } + } + catch (any) { + throw any + } + finally { + // archive artifacts + artifacts.each{ artifact -> + archiveArtifacts artifacts: artifact, allowEmptyArchive: true + } + + // check if using ESLint plugin + def usingEslintPlugin = appStepConfig?.useEslintPlugin ?: + libStepConfig?.useEslintPlugin ?: + false + + if (usingEslintPlugin) { + recordIssues enabledForFailure: true, tool: esLint(pattern: 'eslint-report.xml') + } + } + } + } + } +} + +void validateSecrets(secrets) { + ArrayList errors = [] + secrets.keySet().each{ key -> + def secret = secrets[key] + println "secret -> ${secret}" + if (!secret.id) { + errors << "secret '${key}' must define 'id'" + } + switch(secret.type) { + case "text": + if (!secret.name) errors << "secret '${key}' must define 'name'" + break + case "usernamePassword": + if (!secret.usernameVar) errors << "secret '${key}' must define 'usernameVar'" + if (!secret.passwordVar) errors << "secret '${key}' must define 'passwordVar'" + break + default: + errors << "secret '${key}': type '${secret.type}' is not defined" + } + } + + if (errors) { + error (["Yarn Library Validation Errors: "] + errors.collect{ "- ${it}"})?.join("\n") + } +} + +ArrayList formatSecrets(libStepConfig, appStepConfig) { + LinkedHashMap libSecrets = libStepConfig?.env?.secrets ?: [:] + LinkedHashMap envSecrets = appStepConfig?.env?.secrets ?: [:] + LinkedHashMap secrets = libSecrets + envSecrets + + this.validateSecrets(secrets) + + ArrayList creds = [] + secrets.keySet().each{ key -> + def secret = secrets[key] + switch(secret.type) { + case "text": + creds << string(credentialsId: secret.id, variable: secret.name) + break + case "usernamePassword": + creds << 
usernamePassword(credentialsId: secret.id, usernameVariable: secret.usernameVar, passwordVariable: secret.passwordVar) + break + } + } + return creds +} + +void setEnvVars(libStepConfig, appStepConfig, config, app_env) { + LinkedHashMap libEnv = libStepConfig?.env?.findAll { it.key != 'secrets' } ?: [:] + LinkedHashMap appEnv = appStepConfig?.env?.findAll { it.key != 'secrets' } ?: [:] + LinkedHashMap envVars = libEnv + appEnv + + envVars.each { + env[it.key] = it.value + } + + env.node_version = app_env?.yarn?.node_version ?: + config?.node_version ?: + 'lts/*' + + env.yarn_version = app_env?.yarn?.yarn_version ?: + config?.yarn_version ?: + 'latest' + + String yarnInstall = appStepConfig?.yarnInstall ?: + libStepConfig?.yarnInstall ?: + "frozen-lockfile" + + if (!["install", "frozen-lockfile", "skip"].contains(yarnInstall)) { + error("yarnInstall must be one of \"install\", \"frozen-lockfile\" or \"skip\"; got \"$yarnInstall\"") + } + + env.yarnInstall = (yarnInstall == "frozen-lockfile") + ? "install --frozen-lockfile" + : yarnInstall + + env.scriptCommand = appStepConfig?.script ?: + libStepConfig?.script ?: + null + + if (!env.scriptCommand) { + error("No script command found for step: " + stepContext.name) + } +} diff --git a/libraries/yarn/test/YarnInvokeSpec.groovy b/libraries/yarn/test/YarnInvokeSpec.groovy new file mode 100644 index 00000000..9c5745ca --- /dev/null +++ b/libraries/yarn/test/YarnInvokeSpec.groovy @@ -0,0 +1,393 @@ +/* + Copyright © 2022 Booz Allen Hamilton. All Rights Reserved. + This software package is licensed under the Booz Allen Public License. 
The license can be found in the License file or at http://boozallen.github.io/licenses/bapl +*/ + +package libraries.yarn + +public class YarnInvokeSpec extends JTEPipelineSpecification { + def YarnInvoke = null + + def shellCommandWithYarnInstall = ''' + set +x + source ~/.bashrc + nvm install $node_version + nvm version + + npm install -g yarn@$yarn_version + + echo 'Running with Yarn install' + yarn $yarnInstall + yarn $scriptCommand + ''' + + def shellCommandWithoutYarnInstall = ''' + set +x + source ~/.bashrc + nvm install $node_version + nvm version + + npm install -g yarn@$yarn_version + + echo 'Running without Yarn install' + yarn $scriptCommand + ''' + + LinkedHashMap minimalUnitTestConfig = [ + unit_test: [ + stageName: "Yarn Unit Tests", + script: "test" + ] + ] + + def setup() { + LinkedHashMap config = [:] + LinkedHashMap stepContext = [ + name: "unit_test" + ] + LinkedHashMap env = [:] + + YarnInvoke = loadPipelineScriptForStep("yarn", "yarn_invoke") + + explicitlyMockPipelineStep("inside_sdp_image") + explicitlyMockPipelineVariable("out") + + YarnInvoke.getBinding().setVariable("config", config) + YarnInvoke.getBinding().setVariable("stepContext", stepContext) + YarnInvoke.getBinding().setVariable("env", env) + + getPipelineMock("readJSON")(['file': 'package.json']) >> { + return [ + scripts: [ + test: "jest", + lint: "eslint" + ] + ] + } + } + + def "Fails if Yarn script is not listed in package.json scripts" () { + setup: + YarnInvoke.getBinding().setVariable("config", [unit_test: [stageName: "Yarn Unit Tests", script: "not_found"]]) + when: + YarnInvoke() + then: + 1 * getPipelineMock("error")("script: 'not_found' not found in package.json scripts") + } + + def "Succeeds when Yarn script is listed in package.json scripts" () { + setup: + YarnInvoke.getBinding().setVariable("config", minimalUnitTestConfig) + when: + YarnInvoke() + then: + 0 * getPipelineMock("error")("script: 'test' not found in package.json scripts") + } + + def "defaults 
node_version, yarn_version, and yarnInstall correctly if they are not otherwise specified" () { + setup: + YarnInvoke.getBinding().setVariable("config", minimalUnitTestConfig) + when: + YarnInvoke() + then: + YarnInvoke.getBinding().variables.env.node_version == 'lts/*' + YarnInvoke.getBinding().variables.env.yarn_version == 'latest' + YarnInvoke.getBinding().variables.env.yarnInstall == "install --frozen-lockfile" + } + + def "Library sets config for node_version, yarn_version, yarnInstall, scriptCommand, and environment variables when specified and App Env does not" () { + setup: + YarnInvoke.getBinding().setVariable("config", [ + node_version: "config_node_version", + yarn_version: "config_yarn_version", + unit_test: [ + stageName: "Yarn Unit Tests", + script: "config_scriptCommand", + yarnInstall: "config_yarn_install", + env: [ + someKey: "some_config_value" + ] + ] + ]) + when: + YarnInvoke() + then: + YarnInvoke.getBinding().variables.env.node_version == "config_node_version" + YarnInvoke.getBinding().variables.env.yarn_version == "config_yarn_version" + YarnInvoke.getBinding().variables.env.yarnInstall == "config_yarn_install" + YarnInvoke.getBinding().variables.env.scriptCommand == "config_scriptCommand" + YarnInvoke.getBinding().variables.env.someKey == "some_config_value" + } + + def "App Env overrides library config for node_version, yarn_version, yarnInstall, scriptCommand and environment variables" () { + setup: + YarnInvoke.getBinding().setVariable("config", [ + node_version: "config_node_version", + yarn_version: "config_yarn_version", + unit_test: [ + stageName: "Yarn Unit Tests", + script: "config_scriptCommand", + yarnInstall: "config_yarn_install", + env: [ + someKey: "some_config_value" + ] + ] + ]) + when: + YarnInvoke([ + yarn: [ + node_version: "appEnv_node_version", + yarn_version: "appEnv_yarn_version", + unit_test: [ + stageName: "Yarn Unit Tests", + script: "appEnv_scriptCommand", + yarnInstall: "appEnv_yarn_install", + env: [ + someKey: 
"some_appEnv_value" + ] + ] + ] + ]) + then: + YarnInvoke.getBinding().variables.env.node_version == "appEnv_node_version" + YarnInvoke.getBinding().variables.env.yarnInstall == "appEnv_yarn_install" + YarnInvoke.getBinding().variables.env.scriptCommand == "appEnv_scriptCommand" + YarnInvoke.getBinding().variables.env.someKey == "some_appEnv_value" + } + + def "Defaults Yarn install to 'frozen-lockfile' when yarnInstall is not set; runs yarn install step" () { + setup: + YarnInvoke.getBinding().setVariable("config", minimalUnitTestConfig) + when: + YarnInvoke() + then: + YarnInvoke.getBinding().variables.env.yarnInstall == "install --frozen-lockfile" + 1 * getPipelineMock("sh")(shellCommandWithYarnInstall) + } + + def "Skips Yarn install step when yarnInstall is set to \"skip\"" () { + setup: + YarnInvoke.getBinding().setVariable("config", [unit_test: [stageName: "Yarn Unit Tests", script: "test", yarnInstall: "skip"]]) + when: + YarnInvoke() + then: + 1 * getPipelineMock("sh")(shellCommandWithoutYarnInstall) + } + + def "Archives artifacts correctly" () { + setup: + YarnInvoke.getBinding().setVariable("config", [ + unit_test: [ + stageName: "Yarn Unit Tests", + script: "test", + artifacts: [ + "coverage/lcov.info", + "coverage/lcov-report/**/*" + ] + ] + ]) + when: + YarnInvoke() + then: + 2 * getPipelineMock("archiveArtifacts.call")(_ as Map) + } + + def "Records ESLint results when useEslintPlugin is true" () { + setup: + YarnInvoke.getBinding().setVariable("stepContext", [name: "lint_code"]) + YarnInvoke.getBinding().setVariable("config", [ + lint_code: [ + stageName: "Yarn Linting", + script: "lint", + useEslintPlugin: true + ] + ]) + when: + YarnInvoke() + then: + 1 * explicitlyMockPipelineStep("esLint")(_ as Map) + 1 * explicitlyMockPipelineStep("recordIssues")(_ as Map) + } + + def "Secrets set by library config when specified in library config and not specified in App Env" () { + setup: + YarnInvoke.getBinding().setVariable("config", [ + node_version: 
"config_node_version", + unit_test: [ + stageName: "Yarn Unit Tests", + script: "test", + env: [ + secrets: [ + someTextSecret: [ + type: "text", + name: "TEXT_TOKEN", + id: "credId" + ] + ] + ] + ] + ]) + when: + YarnInvoke() + then: + 1 * getPipelineMock("string.call")([ + 'credentialsId':'credId', + 'variable':'TEXT_TOKEN' + ]) >> "string('credentialsId':'credId', 'variable':'TEXT_TOKEN')" + 1 * getPipelineMock("withCredentials")(_) >> {_arguments -> + assert _arguments[0][0] == ["string('credentialsId':'credId', 'variable':'TEXT_TOKEN')"] + } + } + + def "Secrets set by App Env override same secrets set by library config when specified in both" () { + setup: + YarnInvoke.getBinding().setVariable("config", [ + node_version: "config_node_version", + unit_test: [ + stageName: "Yarn Unit Tests", + script: "test", + env: [ + secrets: [ + someTextSecret: [ + type: "text", + name: "config_TEXT_TOKEN", + id: "config_credId" + ] + ] + ] + ] + ]) + when: + YarnInvoke([ + yarn: [ + unit_test: [ + stageName: "Yarn Unit Tests", + script: "test", + env: [ + secrets: [ + someTextSecret: [ + type: "text", + name: "appEnv_TEXT_TOKEN", + id: "appEnv_credId" + ] + ] + ] + ] + ] + ]) + then: + 1 * getPipelineMock("string.call")([ + 'credentialsId':'appEnv_credId', + 'variable':'appEnv_TEXT_TOKEN' + ]) >> "string('credentialsId':'appEnv_credId', 'variable':'appEnv_TEXT_TOKEN')" + 1 * getPipelineMock("withCredentials")(_) >> {_arguments -> + assert _arguments[0][0] == ["string('credentialsId':'appEnv_credId', 'variable':'appEnv_TEXT_TOKEN')"] + } + } + + def "Secrets without an id cause an error" () { + setup: + YarnInvoke.getBinding().setVariable("config", [ + node_version: "config_node_version", + unit_test: [ + stageName: "Yarn Unit Tests", + script: "test", + env: [ + secrets: [ + someTextSecret: [ + type: "text", + name: "TEXT_TOKEN" + ] + ] + ] + ] + ]) + when: + YarnInvoke() + then: + 1* getPipelineMock("error")([ + "Yarn Library Validation Errors: ", + "- secret 
'someTextSecret' must define 'id'" + ]) + } + + def "Secrets of invalid type cause an error" () { + setup: + YarnInvoke.getBinding().setVariable("config", [ + node_version: "config_node_version", + unit_test: [ + stageName: "Yarn Unit Tests", + script: "test", + env: [ + secrets: [ + someSecret: [ + type: "not_a_type", + name: "TEXT_TOKEN", + id: "credId" + ] + ] + ] + ] + ]) + when: + YarnInvoke() + then: + 1* getPipelineMock("error")([ + "Yarn Library Validation Errors: ", + "- secret 'someSecret': type 'not_a_type' is not defined" + ]) + } + + def "Text type secrets of invalid format cause an error" () { + setup: + YarnInvoke.getBinding().setVariable("config", [ + node_version: "config_node_version", + unit_test: [ + stageName: "Yarn Unit Tests", + script: "test", + env: [ + secrets: [ + someTextSecret: [ + type: "text", + id: "credId" + ] + ] + ] + ] + ]) + when: + YarnInvoke() + then: + 1* getPipelineMock("error")([ + "Yarn Library Validation Errors: ", + "- secret 'someTextSecret' must define 'name'" + ]) + } + + def "usernamePassword type secrets of invalid format cause an error" () { + setup: + YarnInvoke.getBinding().setVariable("config", [ + node_version: "config_node_version", + unit_test: [ + stageName: "Yarn Unit Tests", + script: "test", + env: [ + secrets: [ + someUsernamePasswordSecret: [ + type: "usernamePassword", + id: "credId" + ] + ] + ] + ] + ]) + when: + YarnInvoke() + then: + 1* getPipelineMock("error")([ + "Yarn Library Validation Errors: ", + "- secret 'someUsernamePasswordSecret' must define 'usernameVar'", + "- secret 'someUsernamePasswordSecret' must define 'passwordVar'" + ]) + } +}