diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index a1ba7a51a..ea0ddc7ca 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -175,22 +175,8 @@ jobs: secrets: PUSH_TO_OTHER_REPOS_TOKEN_ADMIN: ${{ secrets.PUSH_TO_OTHER_REPOS_TOKEN_ADMIN }} - test_plan: - needs: deploy_modules - uses: ./.github/workflows/plan_cli.yml - with: - use_modules_from_terraform_registry: true - explicit_ref: master - secrets: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - AWS_ACCESS_KEY_ID_STAGE: ${{ secrets.AWS_ACCESS_KEY_ID_STAGE }} - AWS_SECRET_ACCESS_KEY_STAGE: ${{ secrets.AWS_SECRET_ACCESS_KEY_STAGE }} - ARM_CLIENT_SECRET: ${{ secrets.ARM_CLIENT_SECRET }} - DAM_LICENSE: ${{ secrets.DAM_LICENSE }} - release: - needs: [test_plan] + needs: deploy_modules runs-on: ubuntu-latest steps: - name: Checkout @@ -203,4 +189,28 @@ jobs: gh release create ${{ github.event.inputs.future_release }} --verify-tag --latest --generate-notes env: GH_TOKEN: ${{ github.token }} - + + test_plan: + needs: release + uses: ./.github/workflows/plan_cli.yml + with: + use_modules_from_terraform_registry: true + explicit_ref: master + secrets: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + AWS_ACCESS_KEY_ID_STAGE: ${{ secrets.AWS_ACCESS_KEY_ID_STAGE }} + AWS_SECRET_ACCESS_KEY_STAGE: ${{ secrets.AWS_SECRET_ACCESS_KEY_STAGE }} + ARM_CLIENT_SECRET: ${{ secrets.ARM_CLIENT_SECRET }} + DAM_LICENSE: ${{ secrets.DAM_LICENSE }} + + test_apply: + needs: test_plan + uses: ./.github/workflows/sonar_poc_cli.yml + with: + use_modules_from_terraform_registry: true + explicit_ref: master + secrets: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} diff --git a/.github/workflows/sonar_upgrade.yml b/.github/workflows/sonar_upgrade.yml index 0d7384b36..e92a961d2 100644 --- a/.github/workflows/sonar_upgrade.yml +++ b/.github/workflows/sonar_upgrade.yml @@ -273,8 +273,10 @@ jobs: run: | if [ ${{ github.event_name }} == 'schedule' ]; then mv $UPGRADE_EXAMPLE_DIR/main.tf{,_} + mv $UPGRADE_EXAMPLE_DIR/outputs.tf{,_} terraform -chdir=$UPGRADE_EXAMPLE_DIR destroy -auto-approve mv $UPGRADE_EXAMPLE_DIR/main.tf{_,} + mv $UPGRADE_EXAMPLE_DIR/outputs.tf{_,} fi - name: Terraform Validate for upgrade diff --git a/.github/workflows/sonar_upgrade_unit_tests.yml b/.github/workflows/sonar_upgrade_unit_tests.yml index 22c6210ea..722ff7baa 100644 --- a/.github/workflows/sonar_upgrade_unit_tests.yml +++ b/.github/workflows/sonar_upgrade_unit_tests.yml @@ -66,7 +66,8 @@ jobs: working-directory: ${{ env.PYTHON_UPGRADER_DIR }} run: | source venv/bin/activate - pytest + coverage run -m pytest + coverage report - name: Check how was the workflow run id: check-trigger diff --git a/.gitignore b/.gitignore index c50c97b0b..4fb93f726 100644 --- a/.gitignore +++ b/.gitignore @@ -9,6 +9,7 @@ dsf_hub_ssh_key.pub *.svg *terraform.tfvars *backend.tf +*backend.tf_ *gitignore* *.terraform* *.DS_Store @@ -22,3 +23,4 @@ myip-default *.mprv __pycache__/ upgrade_status.json +.coverage diff --git a/PREVIOUS_RELEASES.md b/PREVIOUS_RELEASES.md new file mode 100644 index 000000000..ad347c852 --- /dev/null +++ b/PREVIOUS_RELEASES.md @@ -0,0 +1,119 @@ +The following table lists _previous_ releases of eDSF Kit, their release date, a high-level summary of the release's content and whether they 
are active or deprecated. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Date + Version + Details + Status +
3 Nov 2022 + 1.0.0 + First release for SEs. Beta. + Deprecated +
20 Nov 2022 + 1.1.0 + Second Release for SEs. Beta. + Deprecated +
3 Jan 2023 + 1.2.0 + 1. Added multi accounts example.
2. Changed modules interface. +
Deprecated +
19 Jan 2023 + 1.3.4 + 1. Refactored directory structure.
2. Released to terraform registry.
3. Supported DSF Hub / Agentless Gateway on RedHat 7 ami.
4. Restricted permissions for Sonar installation.
5. Added the module's version to the examples. +
Deprecated +
26 Jan 2023 + 1.3.5 + 1. Enabled creating RDS MsSQL with synthetic data for POC purposes.
2. Fixed manual and automatic installer machine deployments. +
Deprecated +
5 Feb 2023 + 1.3.6 + Supported SSH proxy for DSF Hub / Agentless Gateway in modules: hub, agentless-gw, federation, poc-db-onboarder. + Deprecated +
28 Feb 2023 + 1.3.7 + + 1. Added the option to provide a custom security group id for the DSF Hub and the Agentless Gateway via the 'security_group_id' variable. +
2. Restricted network resources and general IAM permissions. +
3. Added a new installation example - single_account_deployment. +
4. Added the minimum required Terraform version to all modules. +
5. Added the option to provide EC2 AMI filter details for the DSF Hub and Agentless Gateway via the 'ami' variable. +
6. For user-provided AMI for the DSF node (DSF Hub and the Agentless Gateway) that denies execute access in '/tmp' folder, added the option to specify an alternative path via the 'terraform_script_path_folder' variable. +
7. Passed the password of the DSF node via AWS Secrets Manager. +
8. Added the option to provide a custom S3 bucket location for the Sonar binaries via the 'tarball_location' variable. +
9. Bug fixes. +
Active +
16 Mar 2023 + 1.3.9 + + 1. Added support for deploying a DSF node on an EC2 without outbound internet access by providing a custom AMI with the required dependencies and creating VPC endpoints. +
2. Replaced the installer machine manual and automatic deployment modes with a new and simplified single installer machine mode. +
3. Added support for storing the Terraform state in an AWS S3 bucket. +
4. Made adjustments to support Terraform version 1.4.0. +
Active +
27 Mar 2023 + 1.3.10 + + 1. Added support for supplying a custom key-pair for ssh to the DSF Hub and the Agentless Gateway. +
2. Added support for the new Sonar public patch '4.10.0.1'. +
Active +
diff --git a/README.md b/README.md index fd6c4de9f..a3235cbd6 100644 --- a/README.md +++ b/README.md @@ -126,7 +126,7 @@ This guide references the following information and links, some of which are ava - eDSF Kit GitHub Repository + eDSF Kit GitHub Repository @@ -151,7 +151,10 @@ Latest Supported Terraform Version: 1.5.x. Using a higher version may result in #### Version History -The following table lists the released eDSF Kit versions, their release date and a high-level summary of each version's content. + +The following table lists the _latest_ eDSF Kit releases, their release date and a high-level summary of the release's content. + +[Previous releases](https://github.com/imperva/dsfkit/blob/1.6.0/PREVIOUS_RELEASES.md) @@ -162,93 +165,6 @@ The following table lists the released eDSF Kit versions, their release date and - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - @@ -424,6 +340,21 @@ The following table lists the released eDSF Kit versions, their release date and
2. Updated the Sonar upgrade Alpha release. + + + + +
Details
3 Nov 2022 - 1.0.0 - First release for SEs. Beta. -
20 Nov 2022 - 1.1.0 - Second Release for SEs. Beta. -
3 Jan 2023 - 1.2.0 - 1. Added multi accounts example.
2. Changed modules interface. -
19 Jan 2023 - 1.3.4 - 1. Refactored directory structure.
2. Released to terraform registry.
3. Supported DSF Hub / Agentless Gateway on RedHat 7 ami.
4. Restricted permissions for Sonar installation.
5. Added the module's version to the examples. -
26 Jan 2023 - 1.3.5 - 1. Enabled creating RDS MsSQL with synthetic data for POC purposes.
2. Fixed manual and automatic installer machine deployments. -
5 Feb 2023 - 1.3.6 - Supported SSH proxy for DSF Hub / Agentless Gateway in modules: hub, agentless-gw, federation, poc-db-onboarder. -
28 Feb 2023 - 1.3.7 - - 1. Added the option to provide a custom security group id for the DSF Hub and the Agentless Gateway via the 'security_group_id' variable. -
2. Restricted network resources and general IAM permissions. -
3. Added a new installation example - single_account_deployment. -
4. Added the minimum required Terraform version to all modules. -
5. Added the option to provide EC2 AMI filter details for the DSF Hub and Agentless Gateway via the 'ami' variable. -
6. For user-provided AMI for the DSF node (DSF Hub and the Agentless Gateway) that denies execute access in '/tmp' folder, added the option to specify an alternative path via the 'terraform_script_path_folder' variable. -
7. Passed the password of the DSF node via AWS Secrets Manager. -
8. Added the option to provide a custom S3 bucket location for the Sonar binaries via the 'tarball_location' variable. -
9. Bug fixes. -
16 Mar 2023 - 1.3.9 - - 1. Added support for deploying a DSF node on an EC2 without outbound internet access by providing a custom AMI with the required dependencies and creating VPC endpoints. -
2. Replaced the installer machine manual and automatic deployment modes with a new and simplified single installer machine mode. -
3. Added support for storing the Terraform state in an AWS S3 bucket. -
4. Made adjustments to support Terraform version 1.4.0. -
27 Mar 2023 - 1.3.10 - - 1. Added support for supplying a custom key-pair for ssh to the DSF Hub and the Agentless Gateway. -
2. Added support for the new Sonar public patch '4.10.0.1'. -
3 Apr 2023
6 Nov 2023 + 1.6.0 + + 1. Sonar upgrade Beta release. +
2. Added automatic association between the DSF Hub and the DRA Admin and Analytics. As a result of this association, + the DRA sends its security issues to the DSF Hub, and the DSF Hub sends its security issues and audit data from agent + and agentless sources to the DRA. +
3. Added the option to provide a URL to download the DSF Hub and Agentless Gateway tarball via the 'tarball_url' variable. +
4. Added the option to deploy the Agentless Gateway in a public subnet via the 'use_public_ip' variable. +
5. Added the option to provide a different IP for federation via the 'dsf_hub_federation_ip' and 'dsf_gw_federation_ip' variables. +
@@ -469,7 +400,7 @@ e.g., with or without DRA, the number of Agentless Gateways, with or without HAD We provide several of out-of-the-box Terraform recipes we call "examples" which are already configured to deploy common DSF environments. You can use the example as is, or customize it to accommodate your deployment requirements. -These examples can be found in the eDSF Kit GitHub Repository under the examples directory. +These examples can be found in the eDSF Kit GitHub Repository under the examples directory. Some examples are intended for Lab or POC and others for actual DSF deployments by Professional Services and customers. For more details about each example, click on the example name. @@ -486,73 +417,73 @@ For more details about each example, click on the example name. - Sonar Basic Deployment + Sonar Basic Deployment Lab/POC A DSF deployment with a DSF Hub, an Agentless Gateway, federation, networking and onboarding of a MySQL DB. - sonar_basic_deployment_1_5_7.zip + sonar_basic_deployment_1_6_0.zip - Sonar HADR Deployment + Sonar HADR Deployment Lab/POC A DSF deployment with a DSF Hub, an Agentless Gateway, DSF Hub and Agentless Gateway HADR, federation, networking and onboarding of a MySQL DB. - sonar_hadr_deployment_1_5_7.zip + sonar_hadr_deployment_1_6_0.zip - Sonar Single Account Deployment + Sonar Single Account Deployment PS/Customer A DSF deployment with a DSF Hub HADR, an Agentless Gateway and federation. The DSF nodes (Hubs and Agentless Gateway) are in the same AWS account and the same region. It is mandatory to provide as input to this example the subnets to deploy the DSF nodes on. - sonar_single_account_deployment_1_5_7.zip + sonar_single_account_deployment_1_6_0.zip - Sonar Multi Account Deployment + Sonar Multi Account Deployment PS/Customer A DSF deployment with a DSF Hub, an Agentless Gateway and federation. The DSF nodes (Hub and Agentless Gateway) are in different AWS accounts. It is mandatory to provide as input to this example the subnets to deploy the DSF nodes on. - sonar_multi_account_deployment_1_5_7.zip + sonar_multi_account_deployment_1_6_0.zip - DSF Deployment + DSF Deployment Lab/POC A full DSF deployment with DSF Hub and Agentless Gateways (formerly Sonar), DAM (MX and Agent Gateways), DRA (Admin and DRA Analytics), and Agent and Agentless audit sources. - dsf_deployment_1_5_7.zip + dsf_deployment_1_6_0.zip - DSF Single Account Deployment + DSF Single Account Deployment PS/Customer A full DSF deployment with DSF Hub and Agentless Gateways (formerly Sonar), DAM (MX and Agent Gateways) and DRA (Admin and DRA Analytics). - dsf_single_account_deployment_1_5_7.zip + dsf_single_account_deployment_1_6_0.zip - Sonar Upgrade (Alpha) + Sonar Upgrade (Alpha) All Upgrade of DSF Hub and Agentless Gateway (formerly Sonar). - sonar_upgrade_1_5_7.zip + sonar_upgrade_1_6_0.zip @@ -628,7 +559,7 @@ This mode makes use of the Terraform Command Line Interface (CLI) to deploy and **NOTE:** Update the values for the required parameters to complete the installation: example_name, aws_access_key_id, aws_secret_access_key and region -1. Download the zip file of the example you've chosen (See the [Choosing the Example/Recipe that Fits Your Use Case](#choosing-the-examplerecipe-that-fits-your-use-case) section) from the eDSF Kit GitHub Repository, e.g., if you choose the "sonar_basic_deployment" example, you should download sonar_basic_deployment.zip. +1. 
Download the zip file of the example you've chosen (See the [Choosing the Example/Recipe that Fits Your Use Case](#choosing-the-examplerecipe-that-fits-your-use-case) section) from the eDSF Kit GitHub Repository, e.g., if you choose the "sonar_basic_deployment" example, you should download sonar_basic_deployment.zip. 2. Unzip the zip file in CLI or using your operating system's UI. For example, in CLI: @@ -725,7 +656,7 @@ This mode can be used if a Linux/Unix machine is not available, or eDSF Kit cann 6. In the Network settings panel - make your configurations while keeping in mind that the installer machine should have access to the DSF environment that you want to deploy, and that your computer should have access to the installer machine. -8. In the “Advanced details” panel, copy and paste the contents of this [bash script](https://github.com/imperva/dsfkit/blob/1.5.7/installer_machine/installer_machine_user_data.sh) into the [User data](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/user-data.html) textbox.
![User data](https://user-images.githubusercontent.com/87799317/203826003-661c829f-d704-43c4-adb7-854b8008577c.png) +8. In the “Advanced details” panel, copy and paste the contents of this [bash script](https://github.com/imperva/dsfkit/blob/1.6.0/installer_machine/installer_machine_user_data.sh) into the [User data](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/user-data.html) textbox.
![User data](https://user-images.githubusercontent.com/87799317/203826003-661c829f-d704-43c4-adb7-854b8008577c.png) 9. Click on **Launch Instance**. At this stage, the installer machine is initializing and downloading the necessary dependencies. @@ -744,30 +675,30 @@ This mode can be used if a Linux/Unix machine is not available, or eDSF Kit cann For example: `chmode 400 a_key_pair.pem` -11. Download the zip file of the example you've chosen (See the [Choosing the Example/Recipe that Fits Your Use Case](#choosing-the-examplerecipe-that-fits-your-use-case) section) from the eDSF Kit GitHub Repository, e.g., if you choose the "sonar_basic_deployment" example, you should download sonar_basic_deployment.zip. +11. Download the zip file of the example you've chosen (See the [Choosing the Example/Recipe that Fits Your Use Case](#choosing-the-examplerecipe-that-fits-your-use-case) section) from the eDSF Kit GitHub Repository, e.g., if you choose the "sonar_basic_deployment" example, you should download sonar_basic_deployment.zip. Run: ```bash - wget https://github.com/imperva/dsfkit/raw/1.5.7/examples/aws/poc/sonar_basic_deployment/sonar_basic_deployment_1_5_7.zip + wget https://github.com/imperva/dsfkit/raw/1.6.0/examples/aws/poc/sonar_basic_deployment/sonar_basic_deployment_1_6_0.zip or - wget https://github.com/imperva/dsfkit/raw/1.5.7/examples/aws/poc/sonar_hadr_deployment/sonar_hadr_deployment_1_5_7.zip + wget https://github.com/imperva/dsfkit/raw/1.6.0/examples/aws/poc/sonar_hadr_deployment/sonar_hadr_deployment_1_6_0.zip or - wget https://github.com/imperva/dsfkit/raw/1.5.7/examples/aws/installation/sonar_single_account_deployment/sonar_single_account_deployment_1_5_7.zip + wget https://github.com/imperva/dsfkit/raw/1.6.0/examples/aws/installation/sonar_single_account_deployment/sonar_single_account_deployment_1_6_0.zip or - wget https://github.com/imperva/dsfkit/raw/1.5.7/examples/aws/installation/sonar_multi_account_deployment/sonar_multi_account_deployment_1_5_7.zip + wget https://github.com/imperva/dsfkit/raw/1.6.0/examples/aws/installation/sonar_multi_account_deployment/sonar_multi_account_deployment_1_6_0.zip or - wget https://github.com/imperva/dsfkit/raw/1.5.7/examples/aws/poc/dsf_deployment/dsf_deployment_1_5_7.zip + wget https://github.com/imperva/dsfkit/raw/1.6.0/examples/aws/poc/dsf_deployment/dsf_deployment_1_6_0.zip or - wget https://github.com/imperva/dsfkit/raw/1.5.7/examples/aws/installation/dsf_single_account_deployment/dsf_single_account_deployment_1_5_7.zip + wget https://github.com/imperva/dsfkit/raw/1.6.0/examples/aws/installation/dsf_single_account_deployment/dsf_single_account_deployment_1_6_0.zip ``` 12. Continue by following the [CLI Deployment Mode](#cli-deployment-mode) beginning at step 2. @@ -1055,7 +986,7 @@ After you have [chosen the upgrade mode](#choosing-the-upgrade-mode), follow the This mode offers a straightforward deployment option that relies on running a Terraform script on the user's computer which must be a Linux/Unix machine, e.g, Mac. This mode makes use of the Terraform Command Line Interface (CLI) to deploy and manage environments. -1. Download the zip file of the Sonar upgrade example: sonar_upgrade_1_5_7.zip. +1. Download the zip file of the Sonar upgrade example: sonar_upgrade_1_6_0.zip. 2. Unzip the zip file in CLI or using your operating system's UI. For example, in CLI: @@ -1121,7 +1052,7 @@ This mode can be used if a Linux/Unix machine is not available, or eDSF Kit cann 5. 
In the Network settings panel - make your configurations while keeping in mind that the installer machine should have access to the DSF environment that you want to deploy, and that your computer should have access to the installer machine. -6. In the “Advanced details” panel, copy and paste the contents of this [bash script](https://github.com/imperva/dsfkit/blob/1.5.7/installer_machine/upgrade_installer_machine_user_data.sh) into the [User data](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/user-data.html) textbox. +6. In the “Advanced details” panel, copy and paste the contents of this [bash script](https://github.com/imperva/dsfkit/blob/1.6.0/installer_machine/upgrade_installer_machine_user_data.sh) into the [User data](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/user-data.html) textbox. 9. Click on **Launch Instance**. At this stage, the installer machine is initializing and downloading the necessary dependencies. @@ -1150,13 +1081,13 @@ This mode can be used if a Linux/Unix machine is not available, or eDSF Kit cann If you do not wish to use Terraform to run the upgrade, it is possible to bypass it and run the Python utility directly. -Use the Python Upgrader utility. +Use the Python Upgrader utility. # More Information Information about additional topics can be found in specific examples' READMEs, when relevant. -For example: Sonar Single Account Deployment +For example: Sonar Single Account Deployment These topics include: - Storing Terraform state in S3 bucket @@ -1172,7 +1103,9 @@ terraform apply -auto-approve | tee tf.log Below is a list of possible issues and troubleshooting remediations. - +
+ Common issues +
@@ -1182,6 +1115,46 @@ Below is a list of possible issues and troubleshooting remediations. + + + + + + + + + + + + + + +
Title
Sonar HADR setup internal error + Replication failed!
+ Replication script exited with code 1 +
Contact Imperva's Technical Support. +
Sonar federation internal error + python_commons.http_client.UnexpectedStatusCode: Failed to run: federated_asset_connection_sync. Check /data_vol/sonar-dsf/jsonar/logs/sonarfinder/catalina.out for details.,
+ status: 500, data: None
+ See log "/data_vol/sonar-dsf/jsonar/logs/sonarg/federated.log" for details +
Contact Imperva's Technical Support. +
DAM configuration script exits with status code 28 + : exit status 28. Output: + set -e + Rerun “terraform apply”. +
+ +
+ AWS issues + + + + + + + - - - - - - - - - - - - - - - @@ -1302,4 +1248,28 @@ Below is a list of possible issues and troubleshooting remediations. -
Title + Error message + Remediation +
VPC quota exceeded error creating EC2 VPC: VpcLimitExceeded: The maximum number of VPCs has been reached @@ -1248,33 +1221,6 @@ Below is a list of possible issues and troubleshooting remediations. If you intended the DSF node to have outbound internet access, then make sure the private subnets have routing to a NAT gateway or equivalent. If you didn't intend the DSF node to have outbound internet access, follow the instructions for 'Deploying DSF Nodes without Outbound Internet Access' in your example's README.
Sonar HADR setup internal error - Replication failed!
- Replication script exited with code 1 -
Contact Imperva's Technical Support. -
Sonar federation internal error - python_commons.http_client.UnexpectedStatusCode: Failed to run: federated_asset_connection_sync. Check /data_vol/sonar-dsf/jsonar/logs/sonarfinder/catalina.out for details.,
- status: 500, data: None
- See log "/data_vol/sonar-dsf/jsonar/logs/sonarg/federated.log" for details -
Contact Imperva's Technical Support. -
DAM configuration script exists with status code 28 - : exit status 28. Output: + set -e - Rerun “terraform apply”. -
Sonar upgrade tarball download error - missing IAM role on Sonar node EC2 Connect with SSH to the Sonar node EC2 and fix the aws cli profile misconfiguration. Run, for example, 'aws sts get-caller-identity' to test it.
+
+
+ Azure issues + + + + + + + + + + + +
Title + Error message + Remediation +
Cores quota exceeded + Error: creating Linux Virtual Machine ...: compute.VirtualMachinesClient#CreateOrUpdate: Failure sending request: StatusCode=0 -- Original Error: autorest/azure: Service returned an error. Status= Code="OperationNotAllowed" Message="Operation could not be completed as it results in exceeding approved *** Cores quota. + + Increase the quota using the link provided in your own error message. +
+
+ + diff --git a/examples/aws/installation/dsf_single_account_deployment/dam.tf b/examples/aws/installation/dsf_single_account_deployment/dam.tf index d45c68d0b..7558af77c 100644 --- a/examples/aws/installation/dsf_single_account_deployment/dam.tf +++ b/examples/aws/installation/dsf_single_account_deployment/dam.tf @@ -8,7 +8,7 @@ locals { module "mx" { source = "imperva/dsf-mx/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = var.enable_dam ? 1 : 0 friendly_name = join("-", [local.deployment_name_salted, "mx"]) @@ -38,7 +38,7 @@ module "mx" { module "agent_gw" { source = "imperva/dsf-agent-gw/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = local.agent_gw_count friendly_name = join("-", [local.deployment_name_salted, "agent", "gw", count.index]) @@ -68,7 +68,7 @@ module "agent_gw" { module "agent_gw_cluster_setup" { source = "imperva/dsf-agent-gw-cluster-setup/null" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = local.create_agent_gw_cluster cluster_name = var.cluster_name != null ? var.cluster_name : join("-", [local.deployment_name_salted, "agent", "gw", "cluster"]) diff --git a/examples/aws/installation/dsf_single_account_deployment/dra.tf b/examples/aws/installation/dsf_single_account_deployment/dra.tf index 02abad9ca..7b1b8798e 100644 --- a/examples/aws/installation/dsf_single_account_deployment/dra.tf +++ b/examples/aws/installation/dsf_single_account_deployment/dra.tf @@ -6,7 +6,7 @@ locals { module "dra_admin" { source = "imperva/dsf-dra-admin/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = var.enable_dra ? 1 : 0 friendly_name = join("-", [local.deployment_name_salted, "dra", "admin"]) @@ -28,7 +28,7 @@ module "dra_admin" { module "dra_analytics" { source = "imperva/dsf-dra-analytics/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = local.dra_analytics_count friendly_name = join("-", [local.deployment_name_salted, "dra", "analytics", count.index]) diff --git a/examples/aws/installation/dsf_single_account_deployment/dsf_single_account_deployment_1_5_7.zip b/examples/aws/installation/dsf_single_account_deployment/dsf_single_account_deployment_1_5_7.zip deleted file mode 100644 index 12968a83e..000000000 Binary files a/examples/aws/installation/dsf_single_account_deployment/dsf_single_account_deployment_1_5_7.zip and /dev/null differ diff --git a/examples/aws/installation/dsf_single_account_deployment/dsf_single_account_deployment_1_6_0.zip b/examples/aws/installation/dsf_single_account_deployment/dsf_single_account_deployment_1_6_0.zip new file mode 100644 index 000000000..77cfd0c69 Binary files /dev/null and b/examples/aws/installation/dsf_single_account_deployment/dsf_single_account_deployment_1_6_0.zip differ diff --git a/examples/aws/installation/dsf_single_account_deployment/main.tf b/examples/aws/installation/dsf_single_account_deployment/main.tf index 92c2aabae..3e5a91973 100644 --- a/examples/aws/installation/dsf_single_account_deployment/main.tf +++ b/examples/aws/installation/dsf_single_account_deployment/main.tf @@ -1,6 +1,6 @@ module "globals" { source = "imperva/dsf-globals/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag sonar_version = var.sonar_version dra_version = var.dra_version @@ -39,7 +39,7 @@ locals { module "key_pair_hub_main" { count = var.hub_main_key_pair == null ? 
1 : 0 source = "imperva/dsf-globals/aws//modules/key_pair" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag key_name_prefix = "imperva-dsf-hub-main" private_key_filename = "ssh_keys/dsf_ssh_key-hub-main-${terraform.workspace}" tags = local.tags @@ -51,7 +51,7 @@ module "key_pair_hub_main" { module "key_pair_hub_dr" { count = var.hub_dr_key_pair == null ? 1 : 0 source = "imperva/dsf-globals/aws//modules/key_pair" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag key_name_prefix = "imperva-dsf-hub-dr" private_key_filename = "ssh_keys/dsf_ssh_key-hub-dr-${terraform.workspace}" tags = local.tags @@ -63,7 +63,7 @@ module "key_pair_hub_dr" { module "key_pair_agentless_gw_main" { count = var.agentless_gw_main_key_pair == null ? 1 : 0 source = "imperva/dsf-globals/aws//modules/key_pair" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag key_name_prefix = "imperva-dsf-gw-main" private_key_filename = "ssh_keys/dsf_ssh_key-agentless-gw-main-${terraform.workspace}" tags = local.tags @@ -75,7 +75,7 @@ module "key_pair_agentless_gw_main" { module "key_pair_agentless_gw_dr" { count = var.agentless_gw_dr_key_pair == null ? 1 : 0 source = "imperva/dsf-globals/aws//modules/key_pair" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag key_name_prefix = "imperva-dsf-gw-dr" private_key_filename = "ssh_keys/dsf_ssh_key-agentless-gw-dr-${terraform.workspace}" tags = local.tags @@ -87,7 +87,7 @@ module "key_pair_agentless_gw_dr" { module "key_pair_mx" { count = var.mx_key_pair == null ? 1 : 0 source = "imperva/dsf-globals/aws//modules/key_pair" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag key_name_prefix = "imperva-dsf-mx" private_key_filename = "ssh_keys/dsf_ssh_key-mx-${terraform.workspace}" tags = local.tags @@ -99,7 +99,7 @@ module "key_pair_mx" { module "key_pair_agent_gw" { count = var.agent_gw_key_pair == null ? 1 : 0 source = "imperva/dsf-globals/aws//modules/key_pair" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag key_name_prefix = "imperva-dsf-agent-gw" private_key_filename = "ssh_keys/dsf_ssh_key-agent-gw-${terraform.workspace}" tags = local.tags @@ -111,7 +111,7 @@ module "key_pair_agent_gw" { module "key_pair_dra_admin" { count = var.dra_admin_key_pair == null ? 1 : 0 source = "imperva/dsf-globals/aws//modules/key_pair" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag key_name_prefix = "imperva-dsf-dra-admin" private_key_filename = "ssh_keys/dsf_ssh_key-dra-admin-${terraform.workspace}" tags = local.tags @@ -123,7 +123,7 @@ module "key_pair_dra_admin" { module "key_pair_dra_analytics" { count = var.dra_analytics_key_pair == null ? 
1 : 0 source = "imperva/dsf-globals/aws//modules/key_pair" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag key_name_prefix = "imperva-dsf-dra-analytics" private_key_filename = "ssh_keys/dsf_ssh_key-dra-analytics-${terraform.workspace}" tags = local.tags diff --git a/examples/aws/installation/dsf_single_account_deployment/sonar.tf b/examples/aws/installation/dsf_single_account_deployment/sonar.tf index 55bbd4e4a..4ff16a5ff 100644 --- a/examples/aws/installation/dsf_single_account_deployment/sonar.tf +++ b/examples/aws/installation/dsf_single_account_deployment/sonar.tf @@ -12,7 +12,7 @@ locals { module "hub_main" { source = "imperva/dsf-hub/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = var.enable_sonar ? 1 : 0 friendly_name = join("-", [local.deployment_name_salted, "hub", "main"]) @@ -50,7 +50,7 @@ module "hub_main" { username = mx.web_console_user password = local.password }] : [] - dra_details = var.enable_dra? { + dra_details = var.enable_dra ? { name = module.dra_admin[0].display_name address = module.dra_admin[0].public_ip username = module.dra_admin[0].ssh_user @@ -67,7 +67,7 @@ module "hub_main" { module "hub_dr" { source = "imperva/dsf-hub/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = var.enable_sonar && var.hub_hadr ? 1 : 0 friendly_name = join("-", [local.deployment_name_salted, "hub", "DR"]) @@ -112,7 +112,7 @@ module "hub_dr" { module "hub_hadr" { source = "imperva/dsf-hadr/null" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = length(module.hub_dr) > 0 ? 1 : 0 sonar_version = module.globals.tarball_location.version @@ -137,7 +137,7 @@ module "hub_hadr" { module "agentless_gw_main" { source = "imperva/dsf-agentless-gw/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = local.agentless_gw_count friendly_name = join("-", [local.deployment_name_salted, "agentless", "gw", count.index, "main"]) @@ -177,7 +177,7 @@ module "agentless_gw_main" { module "agentless_gw_dr" { source = "imperva/dsf-agentless-gw/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = var.agentless_gw_hadr ? 
local.agentless_gw_count : 0 friendly_name = join("-", [local.deployment_name_salted, "agentless", "gw", count.index, "DR"]) @@ -220,7 +220,7 @@ module "agentless_gw_dr" { module "agentless_gw_hadr" { source = "imperva/dsf-hadr/null" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = length(module.agentless_gw_dr) sonar_version = module.globals.tarball_location.version @@ -266,20 +266,20 @@ locals { module "federation" { source = "imperva/dsf-federation/null" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag for_each = local.hub_gw_combinations hub_info = { - hub_ip_address = each.value[0].ip + hub_ip_address = each.value[0].ip hub_federation_ip_address = each.value[0].ip - hub_private_ssh_key_path = each.value[0].private_key_file_path - hub_ssh_user = each.value[0].instance.ssh_user + hub_private_ssh_key_path = each.value[0].private_key_file_path + hub_ssh_user = each.value[0].instance.ssh_user } gw_info = { - gw_ip_address = each.value[1].instance.private_ip + gw_ip_address = each.value[1].instance.private_ip gw_federation_ip_address = each.value[1].instance.private_ip - gw_private_ssh_key_path = each.value[1].private_key_file_path - gw_ssh_user = each.value[1].instance.ssh_user + gw_private_ssh_key_path = each.value[1].private_key_file_path + gw_ssh_user = each.value[1].instance.ssh_user } hub_proxy_info = var.proxy_address != null ? { proxy_address = var.proxy_address diff --git a/examples/aws/installation/sonar_multi_account_deployment/main.tf b/examples/aws/installation/sonar_multi_account_deployment/main.tf index b5a6a7920..1d22ecb82 100644 --- a/examples/aws/installation/sonar_multi_account_deployment/main.tf +++ b/examples/aws/installation/sonar_multi_account_deployment/main.tf @@ -1,6 +1,6 @@ module "globals" { source = "imperva/dsf-globals/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag sonar_version = var.sonar_version } @@ -25,7 +25,7 @@ locals { module "key_pair_hub_main" { count = local.should_create_hub_main_key_pair ? 1 : 0 source = "imperva/dsf-globals/aws//modules/key_pair" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag key_name_prefix = "imperva-dsf-hub-main" private_key_filename = "ssh_keys/dsf_ssh_key-hub-main-${terraform.workspace}" tags = local.tags @@ -37,7 +37,7 @@ module "key_pair_hub_main" { module "key_pair_hub_dr" { count = local.should_create_hub_dr_key_pair ? 1 : 0 source = "imperva/dsf-globals/aws//modules/key_pair" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag key_name_prefix = "imperva-dsf-hub-dr" private_key_filename = "ssh_keys/dsf_ssh_key-hub-dr-${terraform.workspace}" tags = local.tags @@ -49,7 +49,7 @@ module "key_pair_hub_dr" { module "key_pair_gw_main" { count = local.should_create_gw_main_key_pair ? 1 : 0 source = "imperva/dsf-globals/aws//modules/key_pair" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag key_name_prefix = "imperva-dsf-gw" private_key_filename = "ssh_keys/dsf_ssh_key-gw-main-${terraform.workspace}" tags = local.tags @@ -61,7 +61,7 @@ module "key_pair_gw_main" { module "key_pair_gw_dr" { count = local.should_create_gw_dr_key_pair ? 
1 : 0 source = "imperva/dsf-globals/aws//modules/key_pair" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag key_name_prefix = "imperva-dsf-gw-dr" private_key_filename = "ssh_keys/dsf_ssh_key-gw-dr-${terraform.workspace}" tags = local.tags @@ -106,7 +106,7 @@ locals { ############################## module "hub_main" { source = "imperva/dsf-hub/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag friendly_name = join("-", [local.deployment_name_salted, "hub", "main"]) subnet_id = var.subnet_hub_main security_group_ids = var.security_group_ids_hub_main @@ -144,7 +144,7 @@ module "hub_main" { module "hub_dr" { source = "imperva/dsf-hub/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag friendly_name = join("-", [local.deployment_name_salted, "hub", "DR"]) subnet_id = var.subnet_hub_dr security_group_ids = var.security_group_ids_hub_dr @@ -186,7 +186,7 @@ module "hub_dr" { module "agentless_gw_main" { count = var.gw_count source = "imperva/dsf-agentless-gw/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag friendly_name = join("-", [local.deployment_name_salted, "gw", count.index, "main"]) subnet_id = var.subnet_gw_main security_group_ids = var.security_group_ids_gw_main @@ -225,7 +225,7 @@ module "agentless_gw_main" { module "agentless_gw_dr" { count = var.gw_count source = "imperva/dsf-agentless-gw/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag friendly_name = join("-", [local.deployment_name_salted, "gw", count.index, "DR"]) subnet_id = var.subnet_gw_dr security_group_ids = var.security_group_ids_gw_dr @@ -266,7 +266,7 @@ module "agentless_gw_dr" { module "hub_hadr" { source = "imperva/dsf-hadr/null" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag sonar_version = module.globals.tarball_location.version dsf_main_ip = module.hub_main.private_ip dsf_main_private_ip = module.hub_main.private_ip @@ -290,7 +290,7 @@ module "hub_hadr" { module "agentless_gw_hadr" { count = var.gw_count source = "imperva/dsf-hadr/null" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag sonar_version = module.globals.tarball_location.version dsf_main_ip = module.agentless_gw_main[count.index].private_ip dsf_main_private_ip = module.agentless_gw_main[count.index].private_ip @@ -324,18 +324,18 @@ locals { module "federation" { count = length(local.hub_gws_combinations) source = "imperva/dsf-federation/null" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag gw_info = { - gw_ip_address = local.hub_gws_combinations[count.index][1].instance.private_ip + gw_ip_address = local.hub_gws_combinations[count.index][1].instance.private_ip gw_federation_ip_address = local.hub_gws_combinations[count.index][1].instance.private_ip - gw_private_ssh_key_path = local.hub_gws_combinations[count.index][1].private_key_file_path - gw_ssh_user = local.hub_gws_combinations[count.index][1].instance.ssh_user + gw_private_ssh_key_path = local.hub_gws_combinations[count.index][1].private_key_file_path + gw_ssh_user = local.hub_gws_combinations[count.index][1].instance.ssh_user } hub_info = { - hub_ip_address = local.hub_gws_combinations[count.index][0].instance.private_ip + hub_ip_address = local.hub_gws_combinations[count.index][0].instance.private_ip hub_federation_ip_address = local.hub_gws_combinations[count.index][0].instance.private_ip - hub_private_ssh_key_path = 
local.hub_gws_combinations[count.index][0].private_key_file_path - hub_ssh_user = local.hub_gws_combinations[count.index][0].instance.ssh_user + hub_private_ssh_key_path = local.hub_gws_combinations[count.index][0].private_key_file_path + hub_ssh_user = local.hub_gws_combinations[count.index][0].instance.ssh_user } hub_proxy_info = var.proxy_address != null ? { proxy_address = var.proxy_address diff --git a/examples/aws/installation/sonar_multi_account_deployment/sonar_multi_account_deployment_1_5_7.zip b/examples/aws/installation/sonar_multi_account_deployment/sonar_multi_account_deployment_1_6_0.zip similarity index 56% rename from examples/aws/installation/sonar_multi_account_deployment/sonar_multi_account_deployment_1_5_7.zip rename to examples/aws/installation/sonar_multi_account_deployment/sonar_multi_account_deployment_1_6_0.zip index 6c7a4763b..05f772d95 100644 Binary files a/examples/aws/installation/sonar_multi_account_deployment/sonar_multi_account_deployment_1_5_7.zip and b/examples/aws/installation/sonar_multi_account_deployment/sonar_multi_account_deployment_1_6_0.zip differ diff --git a/examples/aws/installation/sonar_single_account_deployment/main.tf b/examples/aws/installation/sonar_single_account_deployment/main.tf index 10442803c..979f9e176 100644 --- a/examples/aws/installation/sonar_single_account_deployment/main.tf +++ b/examples/aws/installation/sonar_single_account_deployment/main.tf @@ -5,7 +5,7 @@ provider "aws" { module "globals" { source = "imperva/dsf-globals/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag sonar_version = var.sonar_version } @@ -35,7 +35,7 @@ locals { module "key_pair_hub" { count = local.should_create_hub_key_pair ? 1 : 0 source = "imperva/dsf-globals/aws//modules/key_pair" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag key_name_prefix = "imperva-dsf-hub" private_key_filename = "ssh_keys/dsf_ssh_key-hub-${terraform.workspace}" tags = local.tags @@ -44,7 +44,7 @@ module "key_pair_hub" { module "key_pair_gw" { count = local.should_create_gw_key_pair ? 
1 : 0 source = "imperva/dsf-globals/aws//modules/key_pair" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag key_name_prefix = "imperva-dsf-gw" private_key_filename = "ssh_keys/dsf_ssh_key-gw-${terraform.workspace}" tags = local.tags @@ -74,7 +74,7 @@ data "aws_subnet" "subnet_gw" { ############################## module "hub_main" { source = "imperva/dsf-hub/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag friendly_name = join("-", [local.deployment_name_salted, "hub", "main"]) subnet_id = var.subnet_hub_main security_group_ids = var.security_group_ids_hub @@ -105,7 +105,7 @@ module "hub_main" { module "hub_dr" { source = "imperva/dsf-hub/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag friendly_name = join("-", [local.deployment_name_salted, "hub", "DR"]) subnet_id = var.subnet_hub_dr security_group_ids = var.security_group_ids_hub @@ -140,7 +140,7 @@ module "hub_dr" { module "agentless_gw" { count = var.gw_count source = "imperva/dsf-agentless-gw/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag friendly_name = join("-", [local.deployment_name_salted, "gw", count.index]) subnet_id = var.subnet_gw security_group_ids = var.security_group_ids_gw @@ -174,7 +174,7 @@ module "agentless_gw" { module "hub_hadr" { source = "imperva/dsf-hadr/null" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag sonar_version = module.globals.tarball_location.version dsf_main_ip = module.hub_main.private_ip dsf_main_private_ip = module.hub_main.private_ip @@ -201,19 +201,19 @@ locals { module "federation" { count = length(local.hub_gw_combinations) source = "imperva/dsf-federation/null" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag hub_info = { - hub_ip_address = local.hub_gw_combinations[count.index][0].private_ip + hub_ip_address = local.hub_gw_combinations[count.index][0].private_ip hub_federation_ip_address = local.hub_gw_combinations[count.index][0].private_ip - hub_private_ssh_key_path = local.hub_private_key_file_path - hub_ssh_user = local.hub_gw_combinations[count.index][0].ssh_user + hub_private_ssh_key_path = local.hub_private_key_file_path + hub_ssh_user = local.hub_gw_combinations[count.index][0].ssh_user } gw_info = { - gw_ip_address = local.hub_gw_combinations[count.index][1].private_ip + gw_ip_address = local.hub_gw_combinations[count.index][1].private_ip gw_federation_ip_address = local.hub_gw_combinations[count.index][1].private_ip - gw_private_ssh_key_path = local.gw_private_key_file_path - gw_ssh_user = local.hub_gw_combinations[count.index][1].ssh_user + gw_private_ssh_key_path = local.gw_private_key_file_path + gw_ssh_user = local.hub_gw_combinations[count.index][1].ssh_user } gw_proxy_info = var.use_hub_as_proxy ? 
{ proxy_address = module.hub_main.private_ip diff --git a/examples/aws/installation/sonar_single_account_deployment/sonar_single_account_deployment_1_5_7.zip b/examples/aws/installation/sonar_single_account_deployment/sonar_single_account_deployment_1_6_0.zip similarity index 57% rename from examples/aws/installation/sonar_single_account_deployment/sonar_single_account_deployment_1_5_7.zip rename to examples/aws/installation/sonar_single_account_deployment/sonar_single_account_deployment_1_6_0.zip index cf1d6b6d5..39f7efda5 100644 Binary files a/examples/aws/installation/sonar_single_account_deployment/sonar_single_account_deployment_1_5_7.zip and b/examples/aws/installation/sonar_single_account_deployment/sonar_single_account_deployment_1_6_0.zip differ diff --git a/examples/aws/poc/dsf_deployment/agent_sources.tf b/examples/aws/poc/dsf_deployment/agent_sources.tf index 9cacdcb8d..eee49398e 100644 --- a/examples/aws/poc/dsf_deployment/agent_sources.tf +++ b/examples/aws/poc/dsf_deployment/agent_sources.tf @@ -4,7 +4,7 @@ locals { module "db_with_agent" { source = "imperva/dsf-db-with-agent/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = length(local.db_types_for_agent) friendly_name = join("-", [local.deployment_name_salted, "db", "with", "agent", count.index]) diff --git a/examples/aws/poc/dsf_deployment/agentless_sources.tf b/examples/aws/poc/dsf_deployment/agentless_sources.tf index b85c03936..dbd938437 100644 --- a/examples/aws/poc/dsf_deployment/agentless_sources.tf +++ b/examples/aws/poc/dsf_deployment/agentless_sources.tf @@ -4,7 +4,7 @@ locals { module "rds_mysql" { source = "imperva/dsf-poc-db-onboarder/aws//modules/rds-mysql-db" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = contains(local.db_types_for_agentless, "RDS MySQL") ? 1 : 0 rds_subnet_ids = local.db_subnet_ids @@ -14,7 +14,7 @@ module "rds_mysql" { module "rds_mssql" { source = "imperva/dsf-poc-db-onboarder/aws//modules/rds-mssql-db" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = contains(local.db_types_for_agentless, "RDS MsSQL") ? 1 : 0 rds_subnet_ids = local.db_subnet_ids @@ -29,7 +29,7 @@ module "rds_mssql" { module "db_onboarding" { source = "imperva/dsf-poc-db-onboarder/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag for_each = { for idx, val in concat(module.rds_mysql, module.rds_mssql) : idx => val } sonar_version = module.globals.tarball_location.version diff --git a/examples/aws/poc/dsf_deployment/dam.tf b/examples/aws/poc/dsf_deployment/dam.tf index 5de0cd4a1..2603371cc 100644 --- a/examples/aws/poc/dsf_deployment/dam.tf +++ b/examples/aws/poc/dsf_deployment/dam.tf @@ -8,7 +8,7 @@ locals { module "mx" { source = "imperva/dsf-mx/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = var.enable_dam ? 
1 : 0 friendly_name = join("-", [local.deployment_name_salted, "mx"]) @@ -41,7 +41,7 @@ module "mx" { module "agent_gw" { source = "imperva/dsf-agent-gw/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = local.agent_gw_count friendly_name = join("-", [local.deployment_name_salted, "agent", "gw", count.index]) @@ -67,7 +67,7 @@ module "agent_gw" { module "agent_gw_cluster_setup" { source = "imperva/dsf-agent-gw-cluster-setup/null" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = local.create_agent_gw_cluster cluster_name = join("-", [local.deployment_name_salted, "agent", "gw", "cluster"]) diff --git a/examples/aws/poc/dsf_deployment/dra.tf b/examples/aws/poc/dsf_deployment/dra.tf index ebde4afe6..a70923b1c 100644 --- a/examples/aws/poc/dsf_deployment/dra.tf +++ b/examples/aws/poc/dsf_deployment/dra.tf @@ -6,7 +6,7 @@ locals { module "dra_admin" { source = "imperva/dsf-dra-admin/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = var.enable_dra ? 1 : 0 friendly_name = join("-", [local.deployment_name_salted, "dra", "admin"]) @@ -28,7 +28,7 @@ module "dra_admin" { module "dra_analytics" { source = "imperva/dsf-dra-analytics/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = local.dra_analytics_count friendly_name = join("-", [local.deployment_name_salted, "dra", "analytics", count.index]) diff --git a/examples/aws/poc/dsf_deployment/dsf_deployment_1_5_7.zip b/examples/aws/poc/dsf_deployment/dsf_deployment_1_5_7.zip deleted file mode 100644 index 0965cff9d..000000000 Binary files a/examples/aws/poc/dsf_deployment/dsf_deployment_1_5_7.zip and /dev/null differ diff --git a/examples/aws/poc/dsf_deployment/dsf_deployment_1_6_0.zip b/examples/aws/poc/dsf_deployment/dsf_deployment_1_6_0.zip new file mode 100644 index 000000000..707f9d87f Binary files /dev/null and b/examples/aws/poc/dsf_deployment/dsf_deployment_1_6_0.zip differ diff --git a/examples/aws/poc/dsf_deployment/main.tf b/examples/aws/poc/dsf_deployment/main.tf index 04ecaec70..c1409acb6 100644 --- a/examples/aws/poc/dsf_deployment/main.tf +++ b/examples/aws/poc/dsf_deployment/main.tf @@ -8,7 +8,7 @@ provider "aws" { module "globals" { source = "imperva/dsf-globals/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag sonar_version = var.sonar_version dra_version = var.dra_version @@ -16,7 +16,7 @@ module "globals" { module "key_pair" { source = "imperva/dsf-globals/aws//modules/key_pair" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag key_name_prefix = "imperva-dsf-" private_key_filename = "ssh_keys/dsf_ssh_key-${terraform.workspace}" diff --git a/examples/aws/poc/dsf_deployment/sonar.tf b/examples/aws/poc/dsf_deployment/sonar.tf index 60ccfed05..4274e145d 100644 --- a/examples/aws/poc/dsf_deployment/sonar.tf +++ b/examples/aws/poc/dsf_deployment/sonar.tf @@ -11,7 +11,7 @@ locals { module "hub_main" { source = "imperva/dsf-hub/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = var.enable_sonar ? 1 : 0 friendly_name = join("-", [local.deployment_name_salted, "hub", "main"]) @@ -38,7 +38,7 @@ module "hub_main" { username = mx.web_console_user password = local.password }] : [] - dra_details = var.enable_dra? { + dra_details = var.enable_dra ? 
{ name = module.dra_admin[0].display_name address = module.dra_admin[0].public_ip username = module.dra_admin[0].ssh_user @@ -53,7 +53,7 @@ module "hub_main" { module "hub_dr" { source = "imperva/dsf-hub/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = var.enable_sonar && var.hub_hadr ? 1 : 0 friendly_name = join("-", [local.deployment_name_salted, "hub", "DR"]) @@ -85,7 +85,7 @@ module "hub_dr" { module "hub_hadr" { source = "imperva/dsf-hadr/null" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = length(module.hub_dr) > 0 ? 1 : 0 sonar_version = module.globals.tarball_location.version @@ -103,7 +103,7 @@ module "hub_hadr" { module "agentless_gw_main" { source = "imperva/dsf-agentless-gw/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = local.agentless_gw_count friendly_name = join("-", [local.deployment_name_salted, "agentless", "gw", count.index, "main"]) @@ -133,7 +133,7 @@ module "agentless_gw_main" { module "agentless_gw_dr" { source = "imperva/dsf-agentless-gw/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = var.agentless_gw_hadr ? local.agentless_gw_count : 0 friendly_name = join("-", [local.deployment_name_salted, "agentless", "gw", count.index, "DR"]) @@ -166,7 +166,7 @@ module "agentless_gw_dr" { module "agentless_gw_hadr" { source = "imperva/dsf-hadr/null" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = length(module.agentless_gw_dr) sonar_version = module.globals.tarball_location.version @@ -210,20 +210,20 @@ locals { module "federation" { source = "imperva/dsf-federation/null" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag for_each = local.hub_gw_combinations hub_info = { - hub_ip_address = each.value[0].public_ip + hub_ip_address = each.value[0].public_ip hub_federation_ip_address = each.value[0].public_ip - hub_private_ssh_key_path = module.key_pair.private_key_file_path - hub_ssh_user = each.value[0].ssh_user + hub_private_ssh_key_path = module.key_pair.private_key_file_path + hub_ssh_user = each.value[0].ssh_user } gw_info = { - gw_ip_address = each.value[1].private_ip + gw_ip_address = each.value[1].private_ip gw_federation_ip_address = each.value[1].private_ip - gw_private_ssh_key_path = module.key_pair.private_key_file_path - gw_ssh_user = each.value[1].ssh_user + gw_private_ssh_key_path = module.key_pair.private_key_file_path + gw_ssh_user = each.value[1].ssh_user } gw_proxy_info = { proxy_address = module.hub_main[0].public_ip diff --git a/examples/aws/poc/sonar_basic_deployment/main.tf b/examples/aws/poc/sonar_basic_deployment/main.tf index f349ee8df..04d2afb7c 100644 --- a/examples/aws/poc/sonar_basic_deployment/main.tf +++ b/examples/aws/poc/sonar_basic_deployment/main.tf @@ -8,14 +8,14 @@ provider "aws" { module "globals" { source = "imperva/dsf-globals/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag sonar_version = var.sonar_version } module "key_pair" { source = "imperva/dsf-globals/aws//modules/key_pair" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag key_name_prefix = "imperva-dsf-" private_key_filename = "ssh_keys/dsf_ssh_key-${terraform.workspace}" @@ -80,7 +80,7 @@ data "aws_subnet" "gw" { module "hub" { source = "imperva/dsf-hub/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag friendly_name = 
join("-", [local.deployment_name_salted, "hub"]) instance_type = var.hub_instance_type @@ -106,7 +106,7 @@ module "hub" { module "agentless_gw" { source = "imperva/dsf-agentless-gw/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = var.gw_count friendly_name = join("-", [local.deployment_name_salted, "gw", count.index]) @@ -135,20 +135,20 @@ module "agentless_gw" { module "federation" { source = "imperva/dsf-federation/null" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag for_each = { for idx, val in module.agentless_gw : idx => val } hub_info = { - hub_ip_address = module.hub.public_ip + hub_ip_address = module.hub.public_ip hub_federation_ip_address = module.hub.public_ip - hub_private_ssh_key_path = module.key_pair.private_key_file_path - hub_ssh_user = module.hub.ssh_user + hub_private_ssh_key_path = module.key_pair.private_key_file_path + hub_ssh_user = module.hub.ssh_user } gw_info = { - gw_ip_address = each.value.private_ip + gw_ip_address = each.value.private_ip gw_federation_ip_address = each.value.private_ip - gw_private_ssh_key_path = module.key_pair.private_key_file_path - gw_ssh_user = each.value.ssh_user + gw_private_ssh_key_path = module.key_pair.private_key_file_path + gw_ssh_user = each.value.ssh_user } gw_proxy_info = { proxy_address = module.hub.public_ip @@ -163,7 +163,7 @@ module "federation" { module "rds_mysql" { source = "imperva/dsf-poc-db-onboarder/aws//modules/rds-mysql-db" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = contains(var.db_types_to_onboard, "RDS MySQL") ? 1 : 0 rds_subnet_ids = local.db_subnet_ids @@ -173,7 +173,7 @@ module "rds_mysql" { module "rds_mssql" { source = "imperva/dsf-poc-db-onboarder/aws//modules/rds-mssql-db" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = contains(var.db_types_to_onboard, "RDS MsSQL") ? 
1 : 0 rds_subnet_ids = local.db_subnet_ids @@ -188,7 +188,7 @@ module "rds_mssql" { module "db_onboarding" { source = "imperva/dsf-poc-db-onboarder/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag for_each = { for idx, val in concat(module.rds_mysql, module.rds_mssql) : idx => val } sonar_version = module.globals.tarball_location.version diff --git a/examples/aws/poc/sonar_basic_deployment/sonar_basic_deployment_1_5_7.zip b/examples/aws/poc/sonar_basic_deployment/sonar_basic_deployment_1_6_0.zip similarity index 50% rename from examples/aws/poc/sonar_basic_deployment/sonar_basic_deployment_1_5_7.zip rename to examples/aws/poc/sonar_basic_deployment/sonar_basic_deployment_1_6_0.zip index 3d2261f52..4907e7083 100644 Binary files a/examples/aws/poc/sonar_basic_deployment/sonar_basic_deployment_1_5_7.zip and b/examples/aws/poc/sonar_basic_deployment/sonar_basic_deployment_1_6_0.zip differ diff --git a/examples/aws/poc/sonar_hadr_deployment/main.tf b/examples/aws/poc/sonar_hadr_deployment/main.tf index 2a9af65bd..43da0367d 100644 --- a/examples/aws/poc/sonar_hadr_deployment/main.tf +++ b/examples/aws/poc/sonar_hadr_deployment/main.tf @@ -8,13 +8,13 @@ provider "aws" { module "globals" { source = "imperva/dsf-globals/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag sonar_version = var.sonar_version } module "key_pair" { source = "imperva/dsf-globals/aws//modules/key_pair" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag key_name_prefix = "imperva-dsf-" private_key_filename = "ssh_keys/dsf_ssh_key-${terraform.workspace}" tags = local.tags @@ -87,7 +87,7 @@ module "vpc" { ############################## module "hub_main" { source = "imperva/dsf-hub/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag friendly_name = join("-", [local.deployment_name_salted, "hub", "main"]) instance_type = var.hub_instance_type @@ -114,7 +114,7 @@ module "hub_main" { module "hub_dr" { source = "imperva/dsf-hub/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag friendly_name = join("-", [local.deployment_name_salted, "hub", "DR"]) instance_type = var.hub_instance_type @@ -143,7 +143,7 @@ module "hub_dr" { module "agentless_gw_main" { source = "imperva/dsf-agentless-gw/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = var.gw_count friendly_name = join("-", [local.deployment_name_salted, "gw", count.index, "main"]) @@ -173,7 +173,7 @@ module "agentless_gw_main" { module "agentless_gw_dr" { source = "imperva/dsf-agentless-gw/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = var.gw_count friendly_name = join("-", [local.deployment_name_salted, "gw", count.index, "DR"]) @@ -206,7 +206,7 @@ module "agentless_gw_dr" { module "hub_hadr" { source = "imperva/dsf-hadr/null" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag sonar_version = module.globals.tarball_location.version dsf_main_ip = module.hub_main.public_ip @@ -223,7 +223,7 @@ module "hub_hadr" { module "agentless_gw_hadr" { source = "imperva/dsf-hadr/null" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = var.gw_count sonar_version = module.globals.tarball_location.version @@ -256,20 +256,20 @@ locals { module "federation" { source = "imperva/dsf-federation/null" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest 
release tag count = length(local.hub_gw_combinations) hub_info = { - hub_ip_address = local.hub_gw_combinations[count.index][0].public_ip + hub_ip_address = local.hub_gw_combinations[count.index][0].public_ip hub_federation_ip_address = local.hub_gw_combinations[count.index][0].public_ip - hub_private_ssh_key_path = module.key_pair.private_key_file_path - hub_ssh_user = local.hub_gw_combinations[count.index][0].ssh_user + hub_private_ssh_key_path = module.key_pair.private_key_file_path + hub_ssh_user = local.hub_gw_combinations[count.index][0].ssh_user } gw_info = { - gw_ip_address = local.hub_gw_combinations[count.index][1].private_ip + gw_ip_address = local.hub_gw_combinations[count.index][1].private_ip gw_federation_ip_address = local.hub_gw_combinations[count.index][1].private_ip - gw_private_ssh_key_path = module.key_pair.private_key_file_path - gw_ssh_user = local.hub_gw_combinations[count.index][1].ssh_user + gw_private_ssh_key_path = module.key_pair.private_key_file_path + gw_ssh_user = local.hub_gw_combinations[count.index][1].ssh_user } gw_proxy_info = { proxy_address = module.hub_main.public_ip @@ -284,7 +284,7 @@ module "federation" { module "rds_mysql" { source = "imperva/dsf-poc-db-onboarder/aws//modules/rds-mysql-db" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = contains(var.db_types_to_onboard, "RDS MySQL") ? 1 : 0 rds_subnet_ids = local.db_subnet_ids @@ -295,7 +295,7 @@ module "rds_mysql" { # create a RDS SQL Server DB module "rds_mssql" { source = "imperva/dsf-poc-db-onboarder/aws//modules/rds-mssql-db" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = contains(var.db_types_to_onboard, "RDS MsSQL") ? 1 : 0 rds_subnet_ids = local.db_subnet_ids @@ -310,7 +310,7 @@ module "rds_mssql" { module "db_onboarding" { source = "imperva/dsf-poc-db-onboarder/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag for_each = { for idx, val in concat(module.rds_mysql, module.rds_mssql) : idx => val } sonar_version = module.globals.tarball_location.version diff --git a/examples/aws/poc/sonar_hadr_deployment/sonar_hadr_deployment_1_5_7.zip b/examples/aws/poc/sonar_hadr_deployment/sonar_hadr_deployment_1_5_7.zip deleted file mode 100644 index 47ba31e9c..000000000 Binary files a/examples/aws/poc/sonar_hadr_deployment/sonar_hadr_deployment_1_5_7.zip and /dev/null differ diff --git a/examples/aws/poc/sonar_hadr_deployment/sonar_hadr_deployment_1_6_0.zip b/examples/aws/poc/sonar_hadr_deployment/sonar_hadr_deployment_1_6_0.zip new file mode 100644 index 000000000..6d70cf159 Binary files /dev/null and b/examples/aws/poc/sonar_hadr_deployment/sonar_hadr_deployment_1_6_0.zip differ diff --git a/examples/aws/sonar_upgrade/main.tf b/examples/aws/sonar_upgrade/main.tf index 1467c675b..33762e0d9 100644 --- a/examples/aws/sonar_upgrade/main.tf +++ b/examples/aws/sonar_upgrade/main.tf @@ -1,6 +1,6 @@ module "sonar_upgrader" { source = "imperva/dsf-sonar-upgrader/aws" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag # Fill the details of the Agentless Gateways and DSF Hubs that you want to upgrade agentless_gws = [ diff --git a/examples/aws/sonar_upgrade/outputs.tf b/examples/aws/sonar_upgrade/outputs.tf new file mode 100644 index 000000000..84e79f4e2 --- /dev/null +++ b/examples/aws/sonar_upgrade/outputs.tf @@ -0,0 +1,43 @@ +output "agentless_gws" { + value = module.sonar_upgrader.agentless_gws +} + +output "dsf_hubs" { + value = 
module.sonar_upgrader.dsf_hubs +} + +output "target_version" { + value = module.sonar_upgrader.target_version +} + +output "test_connection" { + value = module.sonar_upgrader.test_connection +} + +output "run_preflight_validations" { + value = module.sonar_upgrader.run_preflight_validations +} + +output "run_upgrade" { + value = module.sonar_upgrader.run_upgrade +} + +output "run_postflight_validations" { + value = module.sonar_upgrader.run_postflight_validations +} + +#output "clean_old_deployments" { +# value = module.sonar_upgrader.clean_old_deployments +#} + +output "stop_on_failure" { + value = module.sonar_upgrader.stop_on_failure +} + +output "tarball_location" { + value = var.tarball_location +} + +output "summary" { + value = try(jsondecode(file("upgrade_status.json")), null) +} \ No newline at end of file diff --git a/examples/aws/sonar_upgrade/sonar_upgrade_1_5_7.zip b/examples/aws/sonar_upgrade/sonar_upgrade_1_5_7.zip deleted file mode 100644 index 3641ffffe..000000000 Binary files a/examples/aws/sonar_upgrade/sonar_upgrade_1_5_7.zip and /dev/null differ diff --git a/examples/aws/sonar_upgrade/sonar_upgrade_1_6_0.zip b/examples/aws/sonar_upgrade/sonar_upgrade_1_6_0.zip new file mode 100644 index 000000000..794cc0690 Binary files /dev/null and b/examples/aws/sonar_upgrade/sonar_upgrade_1_6_0.zip differ diff --git a/examples/azure/poc/dsf_deployment/dsf_deployment_1_5_7.zip b/examples/azure/poc/dsf_deployment/dsf_deployment_1_5_7.zip deleted file mode 100644 index 5b42f6cf7..000000000 Binary files a/examples/azure/poc/dsf_deployment/dsf_deployment_1_5_7.zip and /dev/null differ diff --git a/examples/azure/poc/dsf_deployment/dsf_deployment_1_6_0.zip b/examples/azure/poc/dsf_deployment/dsf_deployment_1_6_0.zip new file mode 100644 index 000000000..b6fd778b7 Binary files /dev/null and b/examples/azure/poc/dsf_deployment/dsf_deployment_1_6_0.zip differ diff --git a/examples/azure/poc/dsf_deployment/main.tf b/examples/azure/poc/dsf_deployment/main.tf index d810d1b69..0cab45659 100644 --- a/examples/azure/poc/dsf_deployment/main.tf +++ b/examples/azure/poc/dsf_deployment/main.tf @@ -1,6 +1,6 @@ module "globals" { source = "imperva/dsf-globals/azurerm" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag } resource "azurerm_resource_group" "rg" { diff --git a/examples/azure/poc/dsf_deployment/sonar.tf b/examples/azure/poc/dsf_deployment/sonar.tf index 2b25c8bc7..a45e97778 100644 --- a/examples/azure/poc/dsf_deployment/sonar.tf +++ b/examples/azure/poc/dsf_deployment/sonar.tf @@ -4,7 +4,7 @@ locals { module "hub_main" { source = "imperva/dsf-hub/azurerm" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = var.enable_sonar ? 1 : 0 friendly_name = join("-", [local.deployment_name_salted, "hub"]) @@ -34,7 +34,7 @@ module "hub_main" { module "hub_dr" { source = "imperva/dsf-hub/azurerm" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = var.enable_sonar && var.hub_hadr ? 1 : 0 friendly_name = join("-", [local.deployment_name_salted, "hub", "DR"]) @@ -66,7 +66,7 @@ module "hub_dr" { module "hub_hadr" { source = "imperva/dsf-hadr/null" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = length(module.hub_dr) > 0 ? 
1 : 0 sonar_version = var.sonar_version @@ -84,7 +84,7 @@ module "hub_hadr" { module "agentless_gw_main" { source = "imperva/dsf-agentless-gw/azurerm" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = local.agentless_gw_count friendly_name = join("-", [local.deployment_name_salted, "agentless", "gw", count.index]) @@ -115,7 +115,7 @@ module "agentless_gw_main" { module "agentless_gw_dr" { source = "imperva/dsf-agentless-gw/azurerm" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = var.agentless_gw_hadr ? local.agentless_gw_count : 0 friendly_name = join("-", [local.deployment_name_salted, "agentless", "gw", count.index, "DR"]) @@ -149,7 +149,7 @@ module "agentless_gw_dr" { module "agentless_gw_hadr" { source = "imperva/dsf-hadr/null" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag count = length(module.agentless_gw_dr) sonar_version = var.sonar_version @@ -193,20 +193,20 @@ locals { module "federation" { source = "imperva/dsf-federation/null" - version = "1.5.7" # latest release tag + version = "1.6.0" # latest release tag for_each = local.hub_gw_combinations hub_info = { - hub_ip_address = each.value[0].public_ip + hub_ip_address = each.value[0].public_ip hub_federation_ip_address = each.value[0].public_ip - hub_private_ssh_key_path = local_sensitive_file.ssh_key.filename - hub_ssh_user = each.value[0].ssh_user + hub_private_ssh_key_path = local_sensitive_file.ssh_key.filename + hub_ssh_user = each.value[0].ssh_user } gw_info = { - gw_ip_address = each.value[1].private_ip + gw_ip_address = each.value[1].private_ip gw_federation_ip_address = each.value[1].private_ip - gw_private_ssh_key_path = local_sensitive_file.ssh_key.filename - gw_ssh_user = each.value[1].ssh_user + gw_private_ssh_key_path = local_sensitive_file.ssh_key.filename + gw_ssh_user = each.value[1].ssh_user } gw_proxy_info = { proxy_address = module.hub_main[0].public_ip diff --git a/modules/aws/agent-gw/README.md b/modules/aws/agent-gw/README.md index 7a6d3135f..123729a3a 100644 --- a/modules/aws/agent-gw/README.md +++ b/modules/aws/agent-gw/README.md @@ -83,4 +83,4 @@ API access to the DSF Management server is required to provision this module. Pl For more information about the DSF Agent Gateway and its features, refer to the official documentation [here](https://docs.imperva.com/bundle/v14.11-database-activity-monitoring-user-guide/page/378.htm). -For additional information about DSF deployment using terraform, refer to the main repo README [here](https://github.com/imperva/dsfkit/tree/1.5.7). \ No newline at end of file +For additional information about DSF deployment using terraform, refer to the main repo README [here](https://github.com/imperva/dsfkit/tree/1.6.0). \ No newline at end of file diff --git a/modules/aws/agentless-gw/README.md b/modules/aws/agentless-gw/README.md index 2e82ffeac..076582f99 100644 --- a/modules/aws/agentless-gw/README.md +++ b/modules/aws/agentless-gw/README.md @@ -100,4 +100,4 @@ SSH access is required to provision this module. To SSH into the Agentless Gatew For more information about the Agentless Gateway and its features, refer to the official documentation [here](https://docs.imperva.com/bundle/v4.12-sonar-user-guide/page/80401.htm). -For additional information about DSF deployment using terraform, refer to the main repo README [here](https://github.com/imperva/dsfkit/tree/1.5.7). 
\ No newline at end of file +For additional information about DSF deployment using terraform, refer to the main repo README [here](https://github.com/imperva/dsfkit/tree/1.6.0). \ No newline at end of file diff --git a/modules/aws/db-with-agent/README.md b/modules/aws/db-with-agent/README.md index 05f6f1ecb..6cb5848d3 100644 --- a/modules/aws/db-with-agent/README.md +++ b/modules/aws/db-with-agent/README.md @@ -69,4 +69,4 @@ module "db_with_agent" { ## Additional Information For more information about the DSF Agent Gateway and its features, refer to the official documentation [here](https://docs.imperva.com/bundle/v14.11-database-activity-monitoring-user-guide/page/378.htm). -For additional information about DSF deployment using terraform, refer to the main repo README [here](https://github.com/imperva/dsfkit/tree/1.5.7). \ No newline at end of file +For additional information about DSF deployment using terraform, refer to the main repo README [here](https://github.com/imperva/dsfkit/tree/1.6.0). \ No newline at end of file diff --git a/modules/aws/dra-admin/README.md b/modules/aws/dra-admin/README.md index 96ee9e89c..eeb61e09c 100644 --- a/modules/aws/dra-admin/README.md +++ b/modules/aws/dra-admin/README.md @@ -72,4 +72,4 @@ module "dsf_dra_admin" { For more information about the DSF DRA Admin and its features, refer to the official documentation [here](https://docs.imperva.com/bundle/z-kb-articles-km/page/4e487f3c.html). -For additional information about DSF deployment using terraform, refer to the main repo README [here](https://github.com/imperva/dsfkit/tree/1.5.7). \ No newline at end of file +For additional information about DSF deployment using terraform, refer to the main repo README [here](https://github.com/imperva/dsfkit/tree/1.6.0). \ No newline at end of file diff --git a/modules/aws/dra-analytics/README.md b/modules/aws/dra-analytics/README.md index b052f6693..0c418ee4d 100644 --- a/modules/aws/dra-analytics/README.md +++ b/modules/aws/dra-analytics/README.md @@ -78,4 +78,4 @@ module "dsf_dra_admin" { For more information about the DSF DRA Analytics and its features, refer to the official documentation [here](https://docs.imperva.com/bundle/z-kb-articles-km/page/4e487f3c.html). -For additional information about DSF deployment using terraform, refer to the main repo README [here](https://github.com/imperva/dsfkit/tree/1.5.7). \ No newline at end of file +For additional information about DSF deployment using terraform, refer to the main repo README [here](https://github.com/imperva/dsfkit/tree/1.6.0). \ No newline at end of file diff --git a/modules/aws/hub/README.md b/modules/aws/hub/README.md index 4291978f7..e7a34ccbd 100644 --- a/modules/aws/hub/README.md +++ b/modules/aws/hub/README.md @@ -96,4 +96,4 @@ SSH access is required to provision this module. To SSH into the DSF Hub instanc For more information about the DSF Hub and its features, refer to the official documentation [here](https://docs.imperva.com/bundle/v4.12-sonar-user-guide/page/80401.htm). -For additional information about DSF deployment using terraform, refer to the main repo README [here](https://github.com/imperva/dsfkit/tree/1.5.7). \ No newline at end of file +For additional information about DSF deployment using terraform, refer to the main repo README [here](https://github.com/imperva/dsfkit/tree/1.6.0). 
\ No newline at end of file diff --git a/modules/aws/mx/README.md b/modules/aws/mx/README.md index b440baf56..68ad8947e 100644 --- a/modules/aws/mx/README.md +++ b/modules/aws/mx/README.md @@ -83,4 +83,4 @@ API access is required to provision this module. Please make sure to pass the re For more information about the DSF MX and its features, refer to the official documentation [here](https://docs.imperva.com/bundle/v14.11-dam-management-server-manager-user-guide/page/10068.htm). -For additional information about DSF deployment using terraform, refer to the main repo README [here](https://github.com/imperva/dsfkit/tree/1.5.7). \ No newline at end of file +For additional information about DSF deployment using terraform, refer to the main repo README [here](https://github.com/imperva/dsfkit/tree/1.6.0). \ No newline at end of file diff --git a/modules/aws/rds-mssql-db/README.md b/modules/aws/rds-mssql-db/README.md index 2c206cc85..95abdd0fc 100644 --- a/modules/aws/rds-mssql-db/README.md +++ b/modules/aws/rds-mssql-db/README.md @@ -80,4 +80,4 @@ module "dsf_rds_mssql" { ## Additional Information -For additional information about DSF deployment using terraform, refer to the main repo README [here](https://github.com/imperva/dsfkit/tree/1.5.7). \ No newline at end of file +For additional information about DSF deployment using terraform, refer to the main repo README [here](https://github.com/imperva/dsfkit/tree/1.6.0). \ No newline at end of file diff --git a/modules/aws/sonar-upgrader/README.md b/modules/aws/sonar-upgrader/README.md index 70cb3fb59..6a9cc2612 100644 --- a/modules/aws/sonar-upgrader/README.md +++ b/modules/aws/sonar-upgrader/README.md @@ -76,3 +76,12 @@ Among the DSF Hubs, if more than one is specified, the upgrade order is as appea The upgrade order within an HADR replica set is predefined and cannot be changed by the user - Minor first, DR second and Main last. If one is missing, it is skipped. 
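To make the order above concrete, here is a minimal sketch (a hypothetical helper, not part of this module) that arranges one HADR replica set — expressed as a role-keyed map like the `{"main": ..., "dr": ..., "minor": ...}` structures used by the upgrader's inputs and the tests added in this change — into the documented upgrade order:

```python
# Illustrative only: hypothetical helper mirroring the documented HADR upgrade order
# (Minor first, DR second, Main last; a missing role is simply skipped).
def hadr_upgrade_order(replica_set: dict) -> list:
    """Return the nodes of one HADR replica set in the order they would be upgraded."""
    return [replica_set[role] for role in ("minor", "dr", "main") if role in replica_set]


# Usage with the same shape as the upgrader's agentless_gws / dsf_hubs entries:
hub_set = {
    "main": {"host": "host100"},
    "dr": {"host": "host101"},
    "minor": {"host": "host102"},
}
assert [n["host"] for n in hadr_upgrade_order(hub_set)] == ["host102", "host101", "host100"]
```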
+ +## Target Version + +Supported Sonar target versions are: + +- 4.13.0.10.0 +- 4.12.0.10.0 +- 4.11.0.0.0 +- 4.10.0.1.0 diff --git a/modules/aws/sonar-upgrader/main.tf b/modules/aws/sonar-upgrader/main.tf index 7a4db5c1b..53091e614 100644 --- a/modules/aws/sonar-upgrader/main.tf +++ b/modules/aws/sonar-upgrader/main.tf @@ -9,9 +9,9 @@ locals { run_preflight_validations = var.run_preflight_validations run_upgrade = var.run_upgrade run_postflight_validations = var.run_postflight_validations -# clean_old_deployments = var.clean_old_deployments - stop_on_failure = var.stop_on_failure - tarball_location = jsonencode(var.tarball_location) + # clean_old_deployments = var.clean_old_deployments + stop_on_failure = var.stop_on_failure + tarball_location = jsonencode(var.tarball_location) }) } diff --git a/modules/aws/sonar-upgrader/outputs.tf b/modules/aws/sonar-upgrader/outputs.tf index 33389302c..74c16e3d7 100644 --- a/modules/aws/sonar-upgrader/outputs.tf +++ b/modules/aws/sonar-upgrader/outputs.tf @@ -1,8 +1,8 @@ -output "agentless_gw_list" { +output "agentless_gws" { value = var.agentless_gws } -output "hub_list" { +output "dsf_hubs" { value = var.dsf_hubs } diff --git a/modules/aws/sonar-upgrader/python_upgrader/README.md b/modules/aws/sonar-upgrader/python_upgrader/README.md index f5ec1e50c..b94641941 100644 --- a/modules/aws/sonar-upgrader/python_upgrader/README.md +++ b/modules/aws/sonar-upgrader/python_upgrader/README.md @@ -18,7 +18,7 @@ Before using eDSF Kit to upgrade DSF Hubs and Agentless Gateways, it is necessar If the DSF deployment has not been deployed using the eDSF Kit, it is also necessary to satisfy the following prerequisites: -1. Grant the DSF Hubs and Agentless Gateways IAM roles access to the S3 bucket containing the DSF installation software, use the permissions specified here - [IAM Permissions for Granting Access to DSF Installation](https://github.com/imperva/dsfkit/permissions_samples/DSFIntallationAccessPermissions.txt). +1. Grant the DSF Hubs and Agentless Gateways IAM roles access to the S3 bucket containing the DSF installation software, use the permissions specified here - [IAM Permissions for Granting Access to DSF Installation](https://github.com/imperva/dsfkit/blob/master/permissions_samples/DSFIntallationAccessPermissions.txt). 2. Allow outbound connections from the DSF Hubs and Agentless Gateways to the S3 bucket containing the DSF installation software. 3. AWS CLI installed on the DSF Hubs and Agentless Gateways. 
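As a rough way to check prerequisites 1 and 2 from a DSF Hub or Agentless Gateway, the sketch below attempts a HEAD request against the installation tarball using the node's IAM role. Assumptions: boto3 is available on the node, and the bucket, region and object key are placeholders — only the `s3_bucket` and `s3_region` field names appear in this change; the key name is illustrative.

```python
# Illustrative only: verify that this node's IAM role can reach the DSF installation
# tarball in S3 and that outbound connectivity to S3 works. All values are placeholders.
import boto3
from botocore.exceptions import ClientError

tarball_location = {
    "s3_bucket": "my-dsf-installation-bucket",  # placeholder
    "s3_region": "us-east-1",                   # placeholder
    "s3_key": "jsonar-4.13.0.10.0.tar.gz",      # placeholder object key, illustrative only
}

s3 = boto3.client("s3", region_name=tarball_location["s3_region"])
try:
    s3.head_object(Bucket=tarball_location["s3_bucket"], Key=tarball_location["s3_key"])
    print("Tarball is reachable - IAM permissions and outbound connectivity look OK")
except ClientError as err:
    print(f"Cannot access the installation tarball: {err}")
```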
diff --git a/modules/aws/sonar-upgrader/python_upgrader/tests/requirements-dev.txt b/modules/aws/sonar-upgrader/python_upgrader/tests/requirements-dev.txt index ea778e5dc..af0d1e42e 100644 --- a/modules/aws/sonar-upgrader/python_upgrader/tests/requirements-dev.txt +++ b/modules/aws/sonar-upgrader/python_upgrader/tests/requirements-dev.txt @@ -1,2 +1,3 @@ pytest==7.3.2 -pytest-mock==3.11.1 \ No newline at end of file +pytest-mock==3.11.1 +coverage==7.2.7 \ No newline at end of file diff --git a/modules/aws/sonar-upgrader/python_upgrader/tests/test_main.py b/modules/aws/sonar-upgrader/python_upgrader/tests/test_main.py new file mode 100644 index 000000000..19f806a6e --- /dev/null +++ b/modules/aws/sonar-upgrader/python_upgrader/tests/test_main.py @@ -0,0 +1,512 @@ +# test_main.py + +import pytest +import argparse +import json +from upgrade.main import main, fill_args_defaults, set_global_variables +from upgrade.upgrade_status_service import OverallUpgradeStatus +from upgrade.upgrade_exception import UpgradeException + +gw1 = { + "host": "host1", + "ssh_user": "ec2-user", + "ssh_private_key_file_path": "/home/ssh_key2.pem" +} + +gw2 = { + "host": "host2", + "ssh_user": "ec2-user", + "ssh_private_key_file_path": "/home/ssh_key2.pem" +} + +gw3 = { + "host": "host3", + "ssh_user": "ec2-user", + "ssh_private_key_file_path": "/home/ssh_key2.pem", + "proxy": { + "host": "host100", + "ssh_user": "ec2-user", + "ssh_private_key_file_path": "/home/ssh_key2.pem", + } +} + + +hub1 = { + "host": "host100", + "ssh_user": "ec2-user", + "ssh_private_key_file_path": "/home/ssh_key2.pem" +} + +hub2 = { + "host": "host101", + "ssh_user": "ec2-user", + "ssh_private_key_file_path": "/home/ssh_key2.pem" +} + +hub3 = { + "host": "host102", + "ssh_user": "ec2-user", + "ssh_private_key_file_path": "/home/ssh_key2.pem" +} + +@pytest.fixture +def setup_for_each_test(mocker): + default_args = argparse.Namespace( + agentless_gws=[], + dsf_hubs=[], + target_version="4.13", + connection_timeout=None, + test_connection=None, + run_preflight_validations=None, + run_upgrade=None, + run_postflight_validations=None, + stop_on_failure=None, + tarball_location=None, + ) + fill_args_defaults(default_args) + set_global_variables(100) + + # mock UpgradeStatusService class functions + upgrade_status_service_mock = mocker.Mock() + mocker.patch('upgrade.main.UpgradeStatusService', return_value=upgrade_status_service_mock) + mocker.patch.object(upgrade_status_service_mock, 'should_test_connection', return_value=True) + mocker.patch.object(upgrade_status_service_mock, 'should_collect_python_location', return_value=True) + mocker.patch.object(upgrade_status_service_mock, 'should_run_preflight_validations', return_value=True) + mocker.patch.object(upgrade_status_service_mock, 'should_run_upgrade', return_value=True) + mocker.patch.object(upgrade_status_service_mock, 'should_run_postflight_validations', return_value=True) + mocker.patch.object(upgrade_status_service_mock, 'get_summary', return_value="Mock Summary") + mocker.patch.object(upgrade_status_service_mock, 'are_nodes_with_upgrade_statuses', return_value=True) + mocker.patch.object(upgrade_status_service_mock, 'get_overall_upgrade_status', return_value=OverallUpgradeStatus.SUCCEEDED) + + mocker.patch('upgrade.main.join_paths', side_effect=lambda arg1, arg2, arg3: arg3) + mocker.patch('upgrade.main.read_file_contents', side_effect=lambda file_name: file_name + "_content") + + test_connection_mock = mocker.patch('upgrade.main.test_connection') + + yield default_args, 
upgrade_status_service_mock, test_connection_mock + + +def test_main_all_flags_disabled(setup_for_each_test, mocker): + # given + args, _, _ = setup_for_each_test + setup_custom_args(args, [{"main": gw1}], [{"main": hub1}], False, False, False, False, True) + run_remote_script_mock = mocker.patch('upgrade.main.run_remote_script') + + # when + main(args) + + # then + run_remote_script_mock.assert_not_called() + + +def test_main_all_flags_enabled(setup_for_each_test, mocker): + # given + args, _, test_connection_mock = setup_for_each_test + setup_custom_args(args, [{"main": gw1}], [{"main": hub1}], True, True, True, True, True) + run_remote_script_mock = mocker.patch('upgrade.main.run_remote_script', + side_effect=create_mocked_run_remote_script_side_effects()) + + # when + main(args) + + # then + assert test_connection_mock.call_count == 2 + call_args_list = run_remote_script_mock.call_args_list + assert len(call_args_list) == 8 + for host in ["host1", "host100"]: + assert count_remote_calls_with_host_and_script(call_args_list, host, "get_python_location.sh") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, host, "run_preflight_validations.py") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, host, "upgrade_v4_10.sh") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, host, "run_postflight_validations.py") == 1 + + +def test_main_only_test_connection_enabled(setup_for_each_test, mocker): + # given + args, _, test_connection_mock = setup_for_each_test + setup_custom_args(args, [{"main": gw1}], [{"main": hub1}], True, False, False, False, True) + run_remote_script_mock = mocker.patch('upgrade.main.run_remote_script') + + # when + main(args) + + # then + assert test_connection_mock.call_count == 2 + run_remote_script_mock.assert_not_called() + + +def test_main_only_preflight_enabled(setup_for_each_test, mocker): + # given + args, _, test_connection_mock = setup_for_each_test + setup_custom_args(args, [{"main": gw1}], [{"main": hub1}], False, True, False, False, True) + run_remote_script_mock = mocker.patch('upgrade.main.run_remote_script', + side_effect=create_mocked_run_remote_script_side_effects()) + + # when + main(args) + + # then + test_connection_mock.assert_not_called() + call_args_list = run_remote_script_mock.call_args_list + assert len(call_args_list) == 4 + assert count_remote_calls_with_host_and_script(call_args_list, "host1", "get_python_location.sh") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, "host1", "run_preflight_validations.py") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, "host100", "get_python_location.sh") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, "host100", "run_preflight_validations.py") == 1 + + +def test_main_only_upgrade_enabled(setup_for_each_test, mocker): + # given + args, _, test_connection_mock = setup_for_each_test + setup_custom_args(args, [{"main": gw1}], [{"main": hub1}], False, False, True, False, True) + run_remote_script_mock = mocker.patch('upgrade.main.run_remote_script', + side_effect=create_mocked_run_remote_script_side_effects()) + + # when + main(args) + + # then + test_connection_mock.assert_not_called() + call_args_list = run_remote_script_mock.call_args_list + assert len(call_args_list) == 2 + assert count_remote_calls_with_host_and_script(call_args_list, "host1", "upgrade_v4_10.sh") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, "host100", "upgrade_v4_10.sh") == 1 + + +def 
test_main_only_postflight_enabled(setup_for_each_test, mocker): + # given + args, _, test_connection_mock = setup_for_each_test + setup_custom_args(args, [{"main": gw1}], [{"main": hub1}], False, False, False, True, True) + run_remote_script_mock = mocker.patch('upgrade.main.run_remote_script', + side_effect=create_mocked_run_remote_script_side_effects()) + + # when + main(args) + + # then + test_connection_mock.assert_not_called() + call_args_list = run_remote_script_mock.call_args_list + assert len(call_args_list) == 4 + assert count_remote_calls_with_host_and_script(call_args_list, "host1", "get_python_location.sh") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, "host1", "run_postflight_validations.py") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, "host100", "get_python_location.sh") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, "host100", "run_postflight_validations.py") == 1 + + +def test_main_custom_tarball(setup_for_each_test, mocker): + # given + args, _, test_connection_mock = setup_for_each_test + tarball_location = '{"s3_bucket": "my_custom_bucket", "s3_region": "my_custom_region"}' + setup_custom_args(args, [{"main": gw1}], [], True, True, True, True, True, tarball_location=tarball_location) + run_remote_script_mock = mocker.patch('upgrade.main.run_remote_script', + side_effect=create_mocked_run_remote_script_side_effects()) + + # when + main(args) + + # then + assert test_connection_mock.call_count == 1 + call_args_list = run_remote_script_mock.call_args_list + assert len(call_args_list) == 4 + assert count_remote_calls_with_host_and_script(call_args_list, "host1", "get_python_location.sh") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, "host1", "run_preflight_validations.py") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, "host1", "upgrade_v4_10.sh") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, "host1", "run_postflight_validations.py") == 1 + assert "my_custom_bucket" in call_args_list[2].args[4] + assert "my_custom_region" in call_args_list[2].args[4] + + +def test_main_host_with_proxy(setup_for_each_test, mocker): + # given + args, _, test_connection_mock = setup_for_each_test + setup_custom_args(args, [{"main": gw3}], [], True, True, True, True, True) + test_connection_via_proxy_mock = mocker.patch('upgrade.main.test_connection_via_proxy') + run_remote_script_via_proxy_mock = mocker.patch('upgrade.main.run_remote_script_via_proxy', + side_effect=create_mocked_run_remote_script_with_proxy_side_effects()) + + # when + main(args) + + # then + assert test_connection_via_proxy_mock.call_count == 1 + call_args_list = run_remote_script_via_proxy_mock.call_args_list + assert len(call_args_list) == 4 + assert count_remote_calls_with_host_and_script(call_args_list, "host3", "get_python_location.sh") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, "host3", "run_preflight_validations.py") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, "host3", "upgrade_v4_10.sh") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, "host3", "run_postflight_validations.py") == 1 + + +def test_main_skip_successful_host(setup_for_each_test, mocker): + # given + args, upgrade_status_service_mock, test_connection_mock = setup_for_each_test + setup_custom_args(args, [{"main": gw1}, {"main": gw2}], [], True, True, True, True, True) + mocker.patch.object(upgrade_status_service_mock, 
'should_test_connection', side_effect=lambda host: host == "host2") + mocker.patch.object(upgrade_status_service_mock, 'should_collect_python_location', side_effect=lambda host: host == "host2") + mocker.patch.object(upgrade_status_service_mock, 'should_run_preflight_validations', side_effect=lambda host: host == "host2") + mocker.patch.object(upgrade_status_service_mock, 'should_run_upgrade', side_effect=lambda host: host == "host2") + mocker.patch.object(upgrade_status_service_mock, 'should_run_postflight_validations', side_effect=lambda host: host == "host2") + run_remote_script_mock = mocker.patch('upgrade.main.run_remote_script', side_effect=create_mocked_run_remote_script_side_effects()) + + # when + main(args) + + # then + assert test_connection_mock.call_count == 1 + call_args_list = run_remote_script_mock.call_args_list + assert len(call_args_list) == 4 + assert count_remote_calls_with_host_and_script(call_args_list, "host2", "get_python_location.sh") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, "host2", "run_preflight_validations.py") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, "host2", "upgrade_v4_10.sh") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, "host2", "run_postflight_validations.py") == 1 + + +@pytest.mark.parametrize("preflight_not_pass_hosts, preflight_error_hosts", [ + (["host1"], []), + ([], ["host1"]), +]) +def test_main_preflight_failure_with_stop_on_failure_true(setup_for_each_test, mocker, + preflight_not_pass_hosts, preflight_error_hosts): + # given + args, upgrade_status_service_mock, test_connection_mock = setup_for_each_test + setup_custom_args(args, [{"main": gw1}, {"main": gw2}], [{"main": hub1}], True, True, True, True, True) + run_remote_script_mock = mocker.patch('upgrade.main.run_remote_script', side_effect=create_mocked_run_remote_script_side_effects( + preflight_validations_not_pass_hosts=preflight_not_pass_hosts, preflight_validations_error_hosts=preflight_error_hosts)) + + # when + main(args) + + # then + assert test_connection_mock.call_count == 3 + call_args_list = run_remote_script_mock.call_args_list + assert len(call_args_list) == 4 + for host in ["host1", "host2", "host100"]: + assert count_remote_calls_with_host_and_script(call_args_list, host, "get_python_location.sh") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, "host1", "run_preflight_validations.py") == 1 + + +def test_main_upgrade_failure_with_stop_on_failure_true(setup_for_each_test, mocker): + # given + args, upgrade_status_service_mock, test_connection_mock = setup_for_each_test + setup_custom_args(args, [{"main": gw1}, {"main": gw2}], [{"main": hub1}], True, True, True, True, True) + run_remote_script_mock = mocker.patch('upgrade.main.run_remote_script', side_effect=create_mocked_run_remote_script_side_effects( + upgrade_error_hosts=["host1"])) + + # when + main(args) + + # then + assert test_connection_mock.call_count == 3 + call_args_list = run_remote_script_mock.call_args_list + assert len(call_args_list) == 7 + for host in ["host1", "host2", "host100"]: + assert count_remote_calls_with_host_and_script(call_args_list, host, "get_python_location.sh") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, host, "run_preflight_validations.py") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, "host1", "upgrade_v4_10.sh") == 1 + + +def test_main_python_location_failure_with_stop_on_failure_false(setup_for_each_test, mocker): + # given + args, 
upgrade_status_service_mock, test_connection_mock = setup_for_each_test + setup_custom_args(args, [{"main": gw1}, {"main": gw2}], [{"main": hub1}], True, True, True, True, False) + run_remote_script_mock = mocker.patch('upgrade.main.run_remote_script', side_effect=create_mocked_run_remote_script_side_effects( + python_location_error_hosts=["host1"])) + mocker.patch.object(upgrade_status_service_mock, 'should_run_preflight_validations', side_effect=lambda host: host != "host1") + mocker.patch.object(upgrade_status_service_mock, 'should_run_upgrade', side_effect=lambda host: host != "host1") + mocker.patch.object(upgrade_status_service_mock, 'should_run_postflight_validations', side_effect=lambda host: host != "host1") + + # when + main(args) + + # then + assert test_connection_mock.call_count == 3 + call_args_list = run_remote_script_mock.call_args_list + assert len(call_args_list) == 9 + for host in ["host1", "host2", "host100"]: + assert count_remote_calls_with_host_and_script(call_args_list, host, "get_python_location.sh") == 1 + for host in ["host2", "host100"]: + assert count_remote_calls_with_host_and_script(call_args_list, host, "run_preflight_validations.py") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, host, "upgrade_v4_10.sh") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, host, "run_postflight_validations.py") == 1 + + +@pytest.mark.parametrize("preflight_not_pass_hosts, preflight_error_hosts", [ + (["host1"], []), + ([], ["host1"]), +]) +def test_main_preflight_failure_with_stop_on_failure_false(setup_for_each_test, mocker, + preflight_not_pass_hosts, preflight_error_hosts): + # given + args, upgrade_status_service_mock, test_connection_mock = setup_for_each_test + setup_custom_args(args, [{"main": gw1}, {"main": gw2}], [{"main": hub1}], True, True, True, True, False) + run_remote_script_mock = mocker.patch('upgrade.main.run_remote_script', side_effect=create_mocked_run_remote_script_side_effects( + preflight_validations_not_pass_hosts=preflight_not_pass_hosts, preflight_validations_error_hosts=preflight_error_hosts)) + mocker.patch.object(upgrade_status_service_mock, 'should_run_upgrade', side_effect=lambda host: host != "host1") + mocker.patch.object(upgrade_status_service_mock, 'should_run_postflight_validations', side_effect=lambda host: host != "host1") + + # when + main(args) + + # then + assert test_connection_mock.call_count == 3 + call_args_list = run_remote_script_mock.call_args_list + assert len(call_args_list) == 10 + for host in ["host1", "host2", "host100"]: + assert count_remote_calls_with_host_and_script(call_args_list, host, "get_python_location.sh") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, host, "run_preflight_validations.py") == 1 + for host in ["host2", "host100"]: + assert count_remote_calls_with_host_and_script(call_args_list, host, "upgrade_v4_10.sh") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, host, "run_postflight_validations.py") == 1 + + +def test_main_hadr_set_successful(setup_for_each_test, mocker): + # given + args, upgrade_status_service_mock, test_connection_mock = setup_for_each_test + setup_custom_args(args, [{"main": gw1, "dr": gw2}], [{"main": hub1, "dr": hub2, "minor": hub3}], True, True, True, True, True) + run_remote_script_mock = mocker.patch('upgrade.main.run_remote_script', side_effect=create_mocked_run_remote_script_side_effects()) + + # when + main(args) + + # then + assert test_connection_mock.call_count == 5 + call_args_list = 
run_remote_script_mock.call_args_list + assert len(call_args_list) == 20 + for host in ["host1", "host2", "host100", "host101", "host102"]: + assert count_remote_calls_with_host_and_script(call_args_list, host, "get_python_location.sh") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, host, "run_preflight_validations.py") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, host, "upgrade_v4_10.sh") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, host, "run_postflight_validations.py") == 1 + + +def test_main_hadr_set_skip_node_after_hadr_upgrade_failure_stop_on_failure_false(setup_for_each_test, mocker): + # given + args, upgrade_status_service_mock, test_connection_mock = setup_for_each_test + setup_custom_args(args, [{"main": gw1, "dr": gw2}], [{"main": hub1, "dr": hub2, "minor": hub3}], True, True, True, True, False) + run_remote_script_mock = mocker.patch('upgrade.main.run_remote_script', side_effect=create_mocked_run_remote_script_side_effects( + upgrade_error_hosts=["host2", "host102"])) + + # when + main(args) + + # then + assert test_connection_mock.call_count == 5 + call_args_list = run_remote_script_mock.call_args_list + assert len(call_args_list) == 12 + for host in ["host1", "host2", "host100", "host101", "host102"]: + assert count_remote_calls_with_host_and_script(call_args_list, host, "get_python_location.sh") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, host, "run_preflight_validations.py") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, "host2", "upgrade_v4_10.sh") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, "host102", "upgrade_v4_10.sh") == 1 + + +@pytest.mark.parametrize("postflight_not_pass_hosts, postflight_error_hosts", [ + (["host2", "host102"], []), + ([], ["host2", "host102"]), +]) +def test_main_hadr_set_skip_node_after_hadr_postflight_failure_stop_on_failure_false(setup_for_each_test, mocker, + postflight_not_pass_hosts, + postflight_error_hosts): + # given + args, upgrade_status_service_mock, test_connection_mock = setup_for_each_test + setup_custom_args(args, [{"main": gw1, "dr": gw2}], [{"main": hub1, "dr": hub2, "minor": hub3}], True, True, True, True, False) + run_remote_script_mock = mocker.patch('upgrade.main.run_remote_script', side_effect=create_mocked_run_remote_script_side_effects( + postflight_validations_not_pass_hosts=postflight_not_pass_hosts, postflight_validations_error_hosts=postflight_error_hosts)) + + # when + main(args) + + # then + assert test_connection_mock.call_count == 5 + call_args_list = run_remote_script_mock.call_args_list + assert len(call_args_list) == 14 + for host in ["host1", "host2", "host100", "host101", "host102"]: + assert count_remote_calls_with_host_and_script(call_args_list, host, "get_python_location.sh") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, host, "run_preflight_validations.py") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, "host2", "upgrade_v4_10.sh") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, "host2", "run_postflight_validations.py") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, "host102", "upgrade_v4_10.sh") == 1 + assert count_remote_calls_with_host_and_script(call_args_list, "host102", "run_postflight_validations.py") == 1 + + +@pytest.mark.xfail(raises=UpgradeException) +def test_main_raise_exception_on_overall_status_failed(setup_for_each_test, mocker): + # given + args, 
upgrade_status_service_mock, test_connection_mock = setup_for_each_test + setup_custom_args(args, [{"main": gw1}], [], True, True, True, True, True) + run_remote_script_mock = mocker.patch('upgrade.main.run_remote_script', side_effect=create_mocked_run_remote_script_side_effects( + python_location_error_hosts=["host1"])) + mocker.patch.object(upgrade_status_service_mock, 'get_overall_upgrade_status', return_value=OverallUpgradeStatus.FAILED) + + # when + main(args) + + # then + assert test_connection_mock.call_count == 1 + call_args_list = run_remote_script_mock.call_args_list + assert len(call_args_list) == 1 + assert count_remote_calls_with_host_and_script(call_args_list, "host1", "get_python_location.sh") == 1 + + +def setup_custom_args(args, agentless_gws, dsf_hubs, test_connection, run_preflight_validations, run_upgrade, + run_postflight_validations, stop_on_failure, tarball_location=None): + args.agentless_gws = json.dumps(agentless_gws) + args.dsf_hubs = json.dumps(dsf_hubs) + args.test_connection = test_connection + args.run_preflight_validations = run_preflight_validations + args.run_upgrade = run_upgrade + args.run_postflight_validations = run_postflight_validations + args.stop_on_failure = stop_on_failure + if tarball_location is not None: + args.tarball_location = tarball_location + + +def create_mocked_run_remote_script_side_effects(python_location_error_hosts=None, + preflight_validations_not_pass_hosts=None, + preflight_validations_error_hosts=None, + upgrade_error_hosts=None, + postflight_validations_not_pass_hosts=None, + postflight_validations_error_hosts=None): + def mocked_run_remote_script(host, remote_user, remote_key_filename, script_contents, script_run_command, + connection_timeout): + if "get_python_location.sh" in script_contents: + if python_location_error_hosts is not None and host in python_location_error_hosts: + return "get_python_location error" + else: + return "Python location: test_python_location" + elif "run_preflight_validations.py" in script_contents: + if preflight_validations_error_hosts is not None and host in preflight_validations_error_hosts: + return "run_preflight_validations error" + elif preflight_validations_not_pass_hosts is not None and host in preflight_validations_not_pass_hosts: + return 'Preflight validations result: {"different_version": true, "min_version": true, ' \ + '"max_version_hop": true, "enough_free_disk_space": false}' + else: + return 'Preflight validations result: {"different_version": true, "min_version": true, ' \ + '"max_version_hop": true, "enough_free_disk_space": true}' + elif "upgrade_v4_10.sh" in script_contents: + if upgrade_error_hosts is not None and host in upgrade_error_hosts: + return "upgrade error" + else: + return "Upgrade completed" + elif "run_postflight_validations.py" in script_contents: + if postflight_validations_error_hosts is not None and host in postflight_validations_error_hosts: + return "run_postflight_validations error" + elif postflight_validations_not_pass_hosts is not None and host in postflight_validations_not_pass_hosts: + return 'Postflight validations result: {"correct_version": false}' + else: + return 'Postflight validations result: {"correct_version": true}' + else: + raise Exception("unknown script") + return mocked_run_remote_script + + +def create_mocked_run_remote_script_with_proxy_side_effects(): + mocked_run_remote_script = create_mocked_run_remote_script_side_effects() + def mocked_run_remote_with_proxy_script(host, remote_user, remote_key_filename, script_contents, 
script_run_command, + proxy_host, proxy_user, proxy_key_filename, connection_timeout): + return mocked_run_remote_script(host, remote_user, remote_key_filename, script_contents, script_run_command, + connection_timeout) + return mocked_run_remote_with_proxy_script + + +def count_remote_calls_with_host_and_script(call_args_list, host, script_content): + return sum(1 for call_args in call_args_list if call_args.args[0] == host and script_content in call_args.args[3]) diff --git a/modules/aws/sonar-upgrader/python_upgrader/tests/test_upgrade_status_service.py b/modules/aws/sonar-upgrader/python_upgrader/tests/test_upgrade_status_service.py index 7cf82fdab..46eb9e306 100644 --- a/modules/aws/sonar-upgrader/python_upgrader/tests/test_upgrade_status_service.py +++ b/modules/aws/sonar-upgrader/python_upgrader/tests/test_upgrade_status_service.py @@ -1,7 +1,8 @@ # test_upgrade_status_service.py import pytest -from upgrade.upgrade_status_service import UpgradeStatusService, UpgradeStatus +from collections import Counter +from upgrade.upgrade_status_service import UpgradeStatusService, UpgradeStatus, OverallUpgradeStatus from unittest.mock import ANY # Import ANY from unittest.mock @@ -11,11 +12,11 @@ def setup_for_each_test(): yield upgrade_status_service -def test_init_status_when_file_not_exist(mocker, setup_for_each_test): +def test_init_status_when_file_not_exist(setup_for_each_test, mocker): # given upgrade_status_service = setup_for_each_test is_file_exist_mock = mocker.patch('upgrade.upgrade_status_service.is_file_exist', return_value=False) - read_file_contents_mock = mocker.patch('upgrade.upgrade_status_service.read_file_contents') + read_file_contents_mock = mocker.patch('upgrade.upgrade_status_service.read_file_contents') # for not_called check update_file_safely_mock = mocker.patch('upgrade.upgrade_status_service.update_file_safely') # when @@ -31,7 +32,7 @@ def test_init_status_when_file_not_exist(mocker, setup_for_each_test): update_file_safely_mock.assert_called_once_with("upgrade_status.json", ANY) -def test_init_status_when_file_exists(mocker, setup_for_each_test): +def test_init_status_when_file_exists(setup_for_each_test, mocker): # given upgrade_status_service = setup_for_each_test state_file_content = """ @@ -48,7 +49,7 @@ def test_init_status_when_file_exists(mocker, setup_for_each_test): read_file_contents_mock = mocker.patch('upgrade.upgrade_status_service.read_file_contents', return_value=state_file_content) update_file_safely_mock = mocker.patch('upgrade.upgrade_status_service.update_file_safely') - copy_file_mock = mocker.patch('upgrade.upgrade_status_service.copy_file') + copy_file_mock = mocker.patch('upgrade.upgrade_status_service.copy_file') # for not_called check # when upgrade_status_service.init_upgrade_status(["host1", "host2"], "4.12") @@ -64,7 +65,7 @@ def test_init_status_when_file_exists(mocker, setup_for_each_test): copy_file_mock.assert_not_called() -def test_init_status_when_file_exists_with_different_target_version(mocker, setup_for_each_test): +def test_init_status_when_file_exists_with_different_target_version(setup_for_each_test, mocker): # given upgrade_status_service = setup_for_each_test state_file_content = """ @@ -97,11 +98,11 @@ def test_init_status_when_file_exists_with_different_target_version(mocker, setu copy_file_mock.assert_called_once_with("upgrade_status.json", ANY) -def test_update_upgrade_status(mocker, setup_for_each_test): +def test_update_upgrade_status(setup_for_each_test, mocker): # given upgrade_status_service = setup_for_each_test 
is_file_exist_mock = mocker.patch('upgrade.upgrade_status_service.is_file_exist', return_value=False) - read_file_contents_mock = mocker.patch('upgrade.upgrade_status_service.read_file_contents') + read_file_contents_mock = mocker.patch('upgrade.upgrade_status_service.read_file_contents') # for not_called check update_file_safely_mock = mocker.patch('upgrade.upgrade_status_service.update_file_safely') # when @@ -118,11 +119,11 @@ def test_update_upgrade_status(mocker, setup_for_each_test): assert update_file_safely_mock.call_count == 2 -def test_flush(mocker, setup_for_each_test): +def test_flush(setup_for_each_test, mocker): # given upgrade_status_service = setup_for_each_test is_file_exist_mock = mocker.patch('upgrade.upgrade_status_service.is_file_exist', return_value=False) - read_file_contents_mock = mocker.patch('upgrade.upgrade_status_service.read_file_contents') + read_file_contents_mock = mocker.patch('upgrade.upgrade_status_service.read_file_contents') # for not_called check update_file_safely_mock = mocker.patch('upgrade.upgrade_status_service.update_file_safely') # when @@ -134,3 +135,125 @@ def test_flush(mocker, setup_for_each_test): read_file_contents_mock.assert_not_called() assert update_file_safely_mock.call_count == 2 + +@pytest.mark.parametrize("should_method", [ + UpgradeStatusService.should_test_connection, + UpgradeStatusService.should_collect_python_location, + UpgradeStatusService.should_run_preflight_validations, + UpgradeStatusService.should_run_upgrade, + UpgradeStatusService.should_run_postflight_validations, +]) +def test_should_run_step_methods_on_succeeded_status(setup_for_each_test, mocker, should_method): + # when + mocker.patch('upgrade.upgrade_status_service.is_file_exist', return_value=False) + mocker.patch('upgrade.upgrade_status_service.update_file_safely') + upgrade_status_service = setup_for_each_test + upgrade_status_service.init_upgrade_status(["host1"], "4.12") + upgrade_status_service.update_upgrade_status("host1", UpgradeStatus.SUCCEEDED) + + # given + result = should_method(upgrade_status_service, "host1") + + # then + assert result is False + + +def test_get_upgrade_statuses(setup_for_each_test, mocker): + # when + mocker.patch('upgrade.upgrade_status_service.is_file_exist', return_value=False) + mocker.patch('upgrade.upgrade_status_service.update_file_safely') + upgrade_status_service = setup_for_each_test + upgrade_status_service.init_upgrade_status(["host1", "host2", "host3", "host4"], "4.12") + upgrade_status_service.update_upgrade_status("host1", UpgradeStatus.SUCCEEDED) + upgrade_status_service.update_upgrade_status("host2", UpgradeStatus.SUCCEEDED) + upgrade_status_service.update_upgrade_status("host3", UpgradeStatus.UPGRADE_FAILED) + + # given + result = upgrade_status_service.get_upgrade_statuses() + + # then + # use Counter to ignore order + assert Counter(result) == Counter([UpgradeStatus.SUCCEEDED, UpgradeStatus.SUCCEEDED, + UpgradeStatus.UPGRADE_FAILED, UpgradeStatus.NOT_STARTED]) + + +@pytest.mark.parametrize("statuses_list, expected_result", [ + ([UpgradeStatus.SUCCEEDED, UpgradeStatus.UPGRADE_FAILED, UpgradeStatus.NOT_STARTED], True), + ([UpgradeStatus.SUCCEEDED, UpgradeStatus.UPGRADE_FAILED, UpgradeStatus.NOT_STARTED, UpgradeStatus.UPGRADE_SUCCEEDED], True), + ([UpgradeStatus.SUCCEEDED, UpgradeStatus.UPGRADE_FAILED], False), +]) +def test_are_nodes_with_upgrade_statuses(setup_for_each_test, mocker, statuses_list, expected_result): + # when + mocker.patch('upgrade.upgrade_status_service.is_file_exist', 
return_value=False) + mocker.patch('upgrade.upgrade_status_service.update_file_safely') + upgrade_status_service = setup_for_each_test + upgrade_status_service.init_upgrade_status(["host1", "host2", "host3"], "4.12") + upgrade_status_service.update_upgrade_status("host1", UpgradeStatus.SUCCEEDED) + upgrade_status_service.update_upgrade_status("host2", UpgradeStatus.NOT_STARTED) + upgrade_status_service.update_upgrade_status("host3", UpgradeStatus.UPGRADE_FAILED) + + # given + result = upgrade_status_service.are_nodes_with_upgrade_statuses(statuses_list) + + # then + assert result == expected_result + + +@pytest.mark.parametrize("statuses_list, expected_overall_upgrade_status", [ + ([UpgradeStatus.NOT_STARTED, UpgradeStatus.NOT_STARTED, UpgradeStatus.NOT_STARTED], OverallUpgradeStatus.NOT_STARTED), + ([UpgradeStatus.SUCCEEDED, UpgradeStatus.SUCCEEDED, UpgradeStatus.SUCCEEDED], OverallUpgradeStatus.SUCCEEDED), + ([UpgradeStatus.SUCCEEDED, UpgradeStatus.SUCCEEDED_WITH_WARNINGS, UpgradeStatus.SUCCEEDED], OverallUpgradeStatus.SUCCEEDED_WITH_WARNINGS), + ([UpgradeStatus.NOT_STARTED, UpgradeStatus.SUCCEEDED, UpgradeStatus.SUCCEEDED], OverallUpgradeStatus.RUNNING), + ([UpgradeStatus.RUNNING_UPGRADE, UpgradeStatus.SUCCEEDED, UpgradeStatus.SUCCEEDED], OverallUpgradeStatus.RUNNING), + ([UpgradeStatus.SUCCEEDED, UpgradeStatus.UPGRADE_FAILED, UpgradeStatus.NOT_STARTED], OverallUpgradeStatus.RUNNING), + ([UpgradeStatus.SUCCEEDED, UpgradeStatus.UPGRADE_FAILED, UpgradeStatus.SUCCEEDED_WITH_WARNINGS], OverallUpgradeStatus.FAILED), +]) +def test_get_overall_upgrade_status(setup_for_each_test, mocker, statuses_list, expected_overall_upgrade_status): + # when + mocker.patch('upgrade.upgrade_status_service.is_file_exist', return_value=False) + mocker.patch('upgrade.upgrade_status_service.update_file_safely') + upgrade_status_service = setup_for_each_test + upgrade_status_service.init_upgrade_status(["host1", "host2", "host3"], "4.12") + upgrade_status_service.update_upgrade_status("host1", statuses_list[0]) + upgrade_status_service.update_upgrade_status("host2", statuses_list[1]) + upgrade_status_service.update_upgrade_status("host3", statuses_list[2]) + + # given + result = upgrade_status_service.get_overall_upgrade_status() + + # then + assert result == expected_overall_upgrade_status + + +def test_get_summary(setup_for_each_test, mocker): + # when + mocker.patch('upgrade.upgrade_status_service.is_file_exist', return_value=False) + mocker.patch('upgrade.upgrade_status_service.update_file_safely') + upgrade_status_service = setup_for_each_test + upgrade_status_service.init_upgrade_status(["host1", "host2"], "4.12") + upgrade_status_service.update_upgrade_status("host1", UpgradeStatus.TEST_CONNECTION_FAILED, "host1 error") + upgrade_status_service.update_upgrade_status("host2", UpgradeStatus.UPGRADE_FAILED, "host2 old error") + upgrade_status_service.update_upgrade_status("host2", UpgradeStatus.UPGRADE_SUCCEEDED) # empty message + + # given + summary_result = upgrade_status_service.get_summary() + + # then + assert "Overall upgrade status:" in summary_result + assert OverallUpgradeStatus.RUNNING.value in summary_result # expected overall status + assert "host1" in summary_result + assert UpgradeStatus.TEST_CONNECTION_FAILED.value in summary_result + assert "host1 error" in summary_result + assert "host2" in summary_result + assert UpgradeStatus.UPGRADE_SUCCEEDED.value in summary_result + assert UpgradeStatus.UPGRADE_FAILED.value not in summary_result + assert "host2 old error" not in summary_result + + +# def 
test_1(): +# service = UpgradeStatusService() +# service.init_upgrade_status(["1.2.3.7", "host2"], "4.13") +# service.update_upgrade_status("1.2.3.7", UpgradeStatus.PREFLIGHT_VALIDATIONS_SUCCEEDED, "abcd") +# service.update_upgrade_status("host2", UpgradeStatus.PREFLIGHT_VALIDATIONS_SUCCEEDED) +# service.flush() +# print(service.get_summary()) diff --git a/modules/aws/sonar-upgrader/python_upgrader/upgrade/main.py b/modules/aws/sonar-upgrader/python_upgrader/upgrade/main.py index 2fcb98d9d..c9112e50f 100644 --- a/modules/aws/sonar-upgrader/python_upgrader/upgrade/main.py +++ b/modules/aws/sonar-upgrader/python_upgrader/upgrade/main.py @@ -18,6 +18,10 @@ POSTFLIGHT_VALIDATIONS_SCRIPT_NAME = "run_postflight_validations.py" CLEAN_OLD_DEPLOYMENTS_SCRIPT_NAME = "clean_old_deployments.sh" +# Globals +_connection_timeout = None +_run_dummy_upgrade = False + # Helper functions @@ -38,8 +42,8 @@ def str_to_bool(arg): def set_socket_timeout(): print(f"Default socket timeout: {socket.getdefaulttimeout()}") - socket.setdefaulttimeout(CONNECTION_TIMEOUT) - print(f"Default socket timeout was set to {CONNECTION_TIMEOUT} seconds to ensure uniform behavior across " + socket.setdefaulttimeout(_connection_timeout) + print(f"Default socket timeout was set to {_connection_timeout} seconds to ensure uniform behavior across " f"different platforms") @@ -146,14 +150,14 @@ def run_remote_script_maybe_with_proxy(dsf_node, script_contents, script_run_com dsf_node.get("proxy").get('host'), dsf_node.get("proxy").get("ssh_user"), dsf_node.get("proxy").get("ssh_private_key_file_path"), - CONNECTION_TIMEOUT) + _connection_timeout) else: script_output = run_remote_script(dsf_node.get('host'), dsf_node.get("ssh_user"), dsf_node.get("ssh_private_key_file_path"), script_contents, script_run_command, - CONNECTION_TIMEOUT) + _connection_timeout) return script_output @@ -165,12 +169,12 @@ def test_connection_maybe_with_proxy(dsf_node): dsf_node.get("proxy").get('host'), dsf_node.get("proxy").get("ssh_user"), dsf_node.get("proxy").get("ssh_private_key_file_path"), - CONNECTION_TIMEOUT) + _connection_timeout) else: test_connection(dsf_node.get('host'), dsf_node.get("ssh_user"), dsf_node.get("ssh_private_key_file_path"), - CONNECTION_TIMEOUT) + _connection_timeout) def print_summary(upgrade_status_service, overall_upgrade_status=None): @@ -297,7 +301,7 @@ def parse_args(): return args -def fill_args_defaults(): +def fill_args_defaults(args): if args.connection_timeout is None: args.connection_timeout = 90 if args.test_connection is None: @@ -721,7 +725,7 @@ def upgrade_dsf_node(extended_node, target_version, upgrade_script_file_name, st def run_upgrade_script(dsf_node, target_version, tarball_location, upgrade_script_file_name): - if run_dummy_upgrade: + if _run_dummy_upgrade: print(f"Running dummy upgrade script") script_file_name = 'dummy_upgrade_script.sh' else: @@ -868,8 +872,8 @@ def are_postflight_validations_passed(postflight_validations_result): def verify_successful_run(overall_upgrade_status, args, upgrade_status_service): ''' - Verifies if the scrip run was successful from the applicative point of view. - For example, if if no exceptions were raised but the upgrade failed, the run is considered failed. + Verifies if the script run was successful from the applicative point of view. + For example, if no exceptions were raised but the upgrade failed, the run is considered failed. 
diff --git a/modules/aws/sonar-upgrader/python_upgrader/upgrade/scripts/upgrade_v4_10.sh b/modules/aws/sonar-upgrader/python_upgrader/upgrade/scripts/upgrade_v4_10.sh
index 02bf5f2f9..54f0d4b63 100644
--- a/modules/aws/sonar-upgrader/python_upgrader/upgrade/scripts/upgrade_v4_10.sh
+++ b/modules/aws/sonar-upgrader/python_upgrader/upgrade/scripts/upgrade_v4_10.sh
@@ -33,24 +33,26 @@ echo "Tarball file name: ${installation_s3_key}, in bucket: ${installation_s3_bu
 # For example: /imperva/apps/jsonar/apps/4.11.0.0.0
 JSONAR_BASEDIR=$(grep "^JSONAR_BASEDIR=" /etc/sysconfig/jsonar | cut -d"=" -f2)
 JSONAR_VERSION=$(grep "^JSONAR_VERSION=" /etc/sysconfig/jsonar | cut -d"=" -f2)
+echo "Current Sonar version ${JSONAR_VERSION}"
+
 # For example, /imperva/apps
-APPS_DIR=$(echo "$JSONAR_BASEDIR" | sed "s|/jsonar/apps/${JSONAR_VERSION}||")
-echo "Apps directory: ${APPS_DIR}"
+EXTRACTION_BASE_DIR=$(echo "$JSONAR_BASEDIR" | sed "s|/jsonar/apps/${JSONAR_VERSION}||")
 
 TARBALL_FILE_NAME=$(basename ${installation_s3_key})
-TARBALL_FILE=$APPS_DIR/$TARBALL_FILE_NAME
+TARBALL_FILE=$EXTRACTION_BASE_DIR/$TARBALL_FILE_NAME
 
 VERSION="${TARBALL_FILE#*-}"
 VERSION="${VERSION%.tar.gz}"
 echo "Version: $VERSION"
 
-EXTRACTION_DIR="${APPS_DIR}/jsonar/apps/${VERSION}"
+EXTRACTION_DIR="${EXTRACTION_BASE_DIR}/jsonar/apps/${VERSION}"
+echo "Tarball extraction base directory: ${EXTRACTION_BASE_DIR}"
 echo "Tarball extraction directory: $EXTRACTION_DIR"
 
 function extract_tarball() {
   echo "Extracting tarball..."
-  sudo tar -xf $TARBALL_FILE_NAME -gz -C $APPS_DIR
-  sudo chown -R sonarw:sonar $APPS_DIR
+  sudo tar -xf $TARBALL_FILE_NAME -gz -C $EXTRACTION_BASE_DIR
+  sudo chown -R sonarw:sonar $EXTRACTION_BASE_DIR
   echo "Extracting tarball completed"
 }
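To make the renamed variables concrete, here is the same derivation the script performs with sed and shell parameter expansion, rendered in Python purely as an illustration; the tarball name below is hypothetical, while the base directory comes from the script's own comment.

# Illustration only (Python 3.9+ for removesuffix); not code from the repository.
jsonar_basedir = "/imperva/apps/jsonar/apps/4.11.0.0.0"  # example from the script's comment
jsonar_version = "4.11.0.0.0"

# sed "s|/jsonar/apps/${JSONAR_VERSION}||" applied to JSONAR_BASEDIR
extraction_base_dir = jsonar_basedir.replace(f"/jsonar/apps/{jsonar_version}", "")
assert extraction_base_dir == "/imperva/apps"

tarball_file_name = "jsonar-4.12.0.10.0.tar.gz"  # hypothetical tarball name
tarball_file = f"{extraction_base_dir}/{tarball_file_name}"

# ${TARBALL_FILE#*-} drops everything up to the first '-', then %.tar.gz drops the suffix
version = tarball_file.split("-", 1)[1].removesuffix(".tar.gz")
extraction_dir = f"{extraction_base_dir}/jsonar/apps/{version}"
assert extraction_dir == "/imperva/apps/jsonar/apps/4.12.0.10.0"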
diff --git a/modules/aws/sonar-upgrader/python_upgrader/upgrade/upgrade_status_service.py b/modules/aws/sonar-upgrader/python_upgrader/upgrade/upgrade_status_service.py
index 7a2bee11e..c1ed751fa 100644
--- a/modules/aws/sonar-upgrader/python_upgrader/upgrade/upgrade_status_service.py
+++ b/modules/aws/sonar-upgrader/python_upgrader/upgrade/upgrade_status_service.py
@@ -369,17 +369,3 @@ class OverallUpgradeStatus(Enum):
     SUCCEEDED_WITH_WARNINGS = "Succeeded with warnings"
     FAILED = "Failed"
     UNKNOWN = "Unknown"
-
-
-def test1():
-    service = UpgradeStatusService()
-    service.init_upgrade_status(["1.2.3.7", "host2"], "4.13")
-    service.update_upgrade_status("1.2.3.7", UpgradeStatus.PREFLIGHT_VALIDATIONS_SUCCEEDED, "abcd")
-    service.update_upgrade_status("host2", UpgradeStatus.PREFLIGHT_VALIDATIONS_SUCCEEDED)
-    service.flush()
-    print(service.get_summary())
-
-
-if __name__ == "__main__":
-    print("UpgradeStatusService test")
-    test1()
diff --git a/modules/azurerm/agentless-gw/README.md b/modules/azurerm/agentless-gw/README.md
index e08cb5899..94eafc637 100644
--- a/modules/azurerm/agentless-gw/README.md
+++ b/modules/azurerm/agentless-gw/README.md
@@ -100,4 +100,4 @@ SSH access is required to provision this module. To SSH into the Agentless Gatew
 
 For more information about the Agentless Gateway and its features, refer to the official documentation [here](https://docs.imperva.com/bundle/v4.12-sonar-user-guide/page/80401.htm).
 
-For additional information about DSF deployment using terraform, refer to the main repo README [here](https://github.com/imperva/dsfkit/tree/1.5.7).
\ No newline at end of file
+For additional information about DSF deployment using terraform, refer to the main repo README [here](https://github.com/imperva/dsfkit/tree/1.6.0).
\ No newline at end of file
diff --git a/modules/azurerm/hub/README.md b/modules/azurerm/hub/README.md
index 41955ef14..88f835aa1 100644
--- a/modules/azurerm/hub/README.md
+++ b/modules/azurerm/hub/README.md
@@ -98,4 +98,4 @@ SSH access is required to provision this module. To SSH into the DSF Hub instanc
 
 For more information about the DSF Hub and its features, refer to the official documentation [here](https://docs.imperva.com/bundle/v4.12-sonar-user-guide/page/80401.htm).
 
-For additional information about DSF deployment using terraform, refer to the main repo README [here](https://github.com/imperva/dsfkit/tree/1.5.7).
\ No newline at end of file
+For additional information about DSF deployment using terraform, refer to the main repo README [here](https://github.com/imperva/dsfkit/tree/1.6.0).
\ No newline at end of file
diff --git a/modules/azurerm/sonar-base-instance/main.tf b/modules/azurerm/sonar-base-instance/main.tf
index 60d01f1f3..ff8f658c9 100644
--- a/modules/azurerm/sonar-base-instance/main.tf
+++ b/modules/azurerm/sonar-base-instance/main.tf
@@ -72,8 +72,8 @@ resource "azurerm_linux_virtual_machine" "dsf_base_instance" {
   }
 
   identity {
-    type = "UserAssigned"
-    identity_ids = [azurerm_user_assigned_identity.dsf_base.id]
+    type         = "UserAssigned"
+    identity_ids = [azurerm_user_assigned_identity.dsf_base.id]
   }
 
   tags = merge(var.tags, { Name = var.name })
@@ -83,12 +83,15 @@ resource "azurerm_linux_virtual_machine" "dsf_base_instance" {
       custom_data
     ]
   }
+  depends_on = [
+    azurerm_role_assignment.dsf_base_storage_role_assignment
+  ]
 }
 
 resource "azurerm_user_assigned_identity" "dsf_base" {
   name                = var.name
   resource_group_name = var.resource_group.name
-  location = var.resource_group.location
+  location            = var.resource_group.location
 }
 
 data "azurerm_subscription" "subscription" {
diff --git a/modules/null/federation/variables.tf b/modules/null/federation/variables.tf
index 7afdf2c00..ba02a7441 100644
--- a/modules/null/federation/variables.tf
+++ b/modules/null/federation/variables.tf
@@ -1,9 +1,9 @@
 variable "gw_info" {
   type = object({
-    gw_ip_address = string
+    gw_ip_address            = string
     gw_federation_ip_address = string
-    gw_private_ssh_key_path = string
-    gw_ssh_user = string
+    gw_private_ssh_key_path  = string
+    gw_ssh_user              = string
   })
 
   nullable = false
@@ -12,10 +12,10 @@ variable "gw_info" {
 
 variable "hub_info" {
   type = object({
-    hub_ip_address = string
+    hub_ip_address            = string
     hub_federation_ip_address = string
-    hub_private_ssh_key_path = string
-    hub_ssh_user = string
+    hub_private_ssh_key_path  = string
+    hub_ssh_user              = string
   })
 
   nullable = false
diff --git a/modules/sonar_python_upgrader_1_5_7.zip b/modules/sonar_python_upgrader_1_5_7.zip
deleted file mode 100644
index 58e20ae4d..000000000
Binary files a/modules/sonar_python_upgrader_1_5_7.zip and /dev/null differ
diff --git a/modules/sonar_python_upgrader_1_6_0.zip b/modules/sonar_python_upgrader_1_6_0.zip
new file mode 100644
index 000000000..ff6d89292
Binary files /dev/null and b/modules/sonar_python_upgrader_1_6_0.zip differ