From 69b74ae7ea26df7f134848ec08285743711938ed Mon Sep 17 00:00:00 2001 From: "linda.nasredin" Date: Mon, 16 Oct 2023 16:48:25 +0300 Subject: [PATCH 01/13] Sonar upgrade preflight validations - small refactor --- .../python_upgrader/run_preflight_validations.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/modules/aws/sonar-upgrader/python_upgrader/run_preflight_validations.py b/modules/aws/sonar-upgrader/python_upgrader/run_preflight_validations.py index c43efa7e5..e65b30e6e 100644 --- a/modules/aws/sonar-upgrader/python_upgrader/run_preflight_validations.py +++ b/modules/aws/sonar-upgrader/python_upgrader/run_preflight_validations.py @@ -10,18 +10,24 @@ def main(target_version): print("---------------------------------------------------------------------") time = datetime.now().strftime("%a %b %d %H:%M:%S UTC %Y") print(f"Running upgrade preflight validations at {time}") + result = validate() + result_json_string = json.dumps(result) + # The string "Preflight validations result:" is part of the protocol, if you change it, change its usage + print(f"Preflight validations result: {result_json_string}") + print("---------------------------------------------------------------------") + + +def validate(): source_version = get_sonar_version() different_version, min_version_validation_passed, max_version_hop_validation_passed = \ validate_sonar_version(source_version, target_version) + result = { "different_version": different_version, "min_version": min_version_validation_passed, "max_version_hop": max_version_hop_validation_passed } - result_json_string = json.dumps(result) - # The string "Preflight validations result:" is part of the protocol, if you change it, change its usage - print(f"Preflight validations result: {result_json_string}") - print("---------------------------------------------------------------------") + return result def get_sonar_version(): From 8ef2e645317b61d8194677930606a1e4a7325403 Mon Sep 17 00:00:00 2001 From: "linda.nasredin" Date: Mon, 16 Oct 2023 17:10:10 +0300 Subject: [PATCH 02/13] Sonar upgrade preflight validations - handled shell script errors --- .../python_upgrader/run_preflight_validations.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/modules/aws/sonar-upgrader/python_upgrader/run_preflight_validations.py b/modules/aws/sonar-upgrader/python_upgrader/run_preflight_validations.py index e65b30e6e..78fadc997 100644 --- a/modules/aws/sonar-upgrader/python_upgrader/run_preflight_validations.py +++ b/modules/aws/sonar-upgrader/python_upgrader/run_preflight_validations.py @@ -10,13 +10,21 @@ def main(target_version): print("---------------------------------------------------------------------") time = datetime.now().strftime("%a %b %d %H:%M:%S UTC %Y") print(f"Running upgrade preflight validations at {time}") - result = validate() + result = try_validate() result_json_string = json.dumps(result) # The string "Preflight validations result:" is part of the protocol, if you change it, change its usage print(f"Preflight validations result: {result_json_string}") print("---------------------------------------------------------------------") +def try_validate(): + try: + return validate() + except Exception as ex: + print(f"Preflight validations failed with exception: {str(ex)}") + return {} + + def validate(): source_version = get_sonar_version() different_version, min_version_validation_passed, max_version_hop_validation_passed = \ From 326421d7e8c6f0e40da00747d9ca69fdb4161c91 Mon Sep 17 00:00:00 2001 
From: "linda.nasredin" Date: Mon, 16 Oct 2023 17:14:22 +0300 Subject: [PATCH 03/13] Sonar upgrade postflight validations - handled shell script errors --- .../run_postflight_validations.py | 21 +++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/modules/aws/sonar-upgrader/python_upgrader/run_postflight_validations.py b/modules/aws/sonar-upgrader/python_upgrader/run_postflight_validations.py index 7f7db0dd8..3be52c513 100644 --- a/modules/aws/sonar-upgrader/python_upgrader/run_postflight_validations.py +++ b/modules/aws/sonar-upgrader/python_upgrader/run_postflight_validations.py @@ -9,15 +9,28 @@ def main(target_version): print("-----------------------------------------------------------------------") time = datetime.now().strftime("%a %b %d %H:%M:%S UTC %Y") print(f"Running upgrade postlight validations at {time}") + result = try_validate() + result_json_string = json.dumps(result) + # The string "Preflight validations result:" is part of the protocol, if you change it, change its usage + print(f"Postflight validations result: {result_json_string}") + print("-----------------------------------------------------------------------") + + +def try_validate(): + try: + return validate() + except Exception as ex: + print(f"Postflight validations failed with exception: {str(ex)}") + return {} + + +def validate(): actual_version = get_sonar_version() correct_version = target_version == actual_version result = { "correct_version": correct_version } - result_json_string = json.dumps(result) - # The string "Preflight validations result:" is part of the protocol, if you change it, change its usage - print(f"Postflight validations result: {result_json_string}") - print("-----------------------------------------------------------------------") + return result def get_sonar_version(): From 584295e990201f87b36ff3e194d1727377576874 Mon Sep 17 00:00:00 2001 From: "linda.nasredin" Date: Tue, 17 Oct 2023 11:43:28 +0300 Subject: [PATCH 04/13] EDSF-432 Add preflight validation: Free space /data >= 20GB --- .../run_postflight_validations.py | 2 +- .../run_preflight_validations.py | 61 +++++++++++++++---- 2 files changed, 51 insertions(+), 12 deletions(-) diff --git a/modules/aws/sonar-upgrader/python_upgrader/run_postflight_validations.py b/modules/aws/sonar-upgrader/python_upgrader/run_postflight_validations.py index 3be52c513..639467ec5 100644 --- a/modules/aws/sonar-upgrader/python_upgrader/run_postflight_validations.py +++ b/modules/aws/sonar-upgrader/python_upgrader/run_postflight_validations.py @@ -35,7 +35,7 @@ def validate(): def get_sonar_version(): jsonar_file_path = "/etc/sysconfig/jsonar" - target_key = "VERSION=" + target_key = "JSONAR_VERSION=" version = None diff --git a/modules/aws/sonar-upgrader/python_upgrader/run_preflight_validations.py b/modules/aws/sonar-upgrader/python_upgrader/run_preflight_validations.py index 78fadc997..652142d3d 100644 --- a/modules/aws/sonar-upgrader/python_upgrader/run_preflight_validations.py +++ b/modules/aws/sonar-upgrader/python_upgrader/run_preflight_validations.py @@ -4,6 +4,7 @@ import json from decimal import Decimal from datetime import datetime +import shutil def main(target_version): @@ -26,35 +27,56 @@ def try_validate(): def validate(): - source_version = get_sonar_version() + source_version, data_dir_path = get_sonar_info() different_version, min_version_validation_passed, max_version_hop_validation_passed = \ validate_sonar_version(source_version, target_version) + enough_free_disk_space = 
validate_disk_space(data_dir_path) result = { "different_version": different_version, "min_version": min_version_validation_passed, - "max_version_hop": max_version_hop_validation_passed + "max_version_hop": max_version_hop_validation_passed, + "enough_free_disk_space": enough_free_disk_space } return result -def get_sonar_version(): +def get_sonar_info(): jsonar_file_path = "/etc/sysconfig/jsonar" - target_key = "VERSION=" + version_key = "JSONAR_VERSION=" + data_dir_path_key = "JSONAR_DATADIR=" version = None + data_dir_path = None with open(jsonar_file_path, "r") as file: for line in file: - if target_key in line: - version = line.split(target_key, 1)[1].strip() - break # Break once the key is found - + if version_key in line: + version = get_value_in_line(line, version_key) + if data_dir_path_key in line: + data_dir_path = get_value_in_line(line, data_dir_path_key) + + validate_sonar_version_found(version, jsonar_file_path) + validate_data_dir_path_found(data_dir_path, jsonar_file_path) + return version, data_dir_path + + +def get_value_in_line(line, key): + return line.split(key, 1)[1].strip() + + +def validate_sonar_version_found(version, jsonar_file_path): if version is not None: - print("Found Sonar version:", version) + print(f"Found Sonar version: {version}") else: - print(f"Sonar version not found in the file {jsonar_file_path}") - return version + raise Exception(f"Sonar version not found in the file {jsonar_file_path}") + + +def validate_data_dir_path_found(data_dir, jsonar_file_path): + if data_dir is not None: + print(f"Found /data directory path: {data_dir}") + else: + raise Exception(f"/data directory path not found in the file {jsonar_file_path}") def validate_sonar_version(source_version, target_version): @@ -97,6 +119,23 @@ def validate_max_version_hop(source_major_version, target_major_version): return hop <= 0.02 +def validate_disk_space(data_dir_path): + required_space_gb = 20 + + enough_free_disk_space = check_free_space(data_dir_path, required_space_gb) + if enough_free_disk_space: + print(f"There is more than {required_space_gb} GB of free space in {data_dir_path}") + else: + print(f"There is not enough free space in {data_dir_path}. 
Must be {required_space_gb} or more") + return enough_free_disk_space + + +def check_free_space(directory, required_space_gb): + free_space = shutil.disk_usage(directory).free / (2**30) + print(f"There is {required_space_gb} free space in {directory}") + return free_space >= required_space_gb + + if __name__ == "__main__": target_version = sys.argv[1] main(target_version) From 916ffcda34e6cbd47c931628745fd3f2a46b7323 Mon Sep 17 00:00:00 2001 From: "linda.nasredin" Date: Tue, 17 Oct 2023 12:41:20 +0300 Subject: [PATCH 05/13] EDSF-432 Add preflight validation: Free space /data >= 20GB --- modules/aws/sonar-upgrader/python_upgrader/main.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/modules/aws/sonar-upgrader/python_upgrader/main.py b/modules/aws/sonar-upgrader/python_upgrader/main.py index b7ecb4244..03f63ca4f 100644 --- a/modules/aws/sonar-upgrader/python_upgrader/main.py +++ b/modules/aws/sonar-upgrader/python_upgrader/main.py @@ -498,7 +498,8 @@ def extract_preflight_validations_result(script_output): def are_preflight_validations_passed(preflight_validations_result): return preflight_validations_result.get('different_version') \ and preflight_validations_result.get('min_version') \ - and preflight_validations_result.get('max_version_hop') + and preflight_validations_result.get('max_version_hop') \ + and preflight_validations_result.get('enough_free_disk_space') def maybe_upgrade_and_postflight(agentless_gws, hubs, target_version, run_upgrade, run_postflight_validations, From cc7e3612ac9a23149dce1bdd08f55f411a4f3e6a Mon Sep 17 00:00:00 2001 From: Sivan Hajbi <117824107+sivan-hajbi-imperva@users.noreply.github.com> Date: Tue, 17 Oct 2023 14:10:39 +0300 Subject: [PATCH 06/13] fix upgrade summary friendly status (#285) --- .../sonar-upgrader/python_upgrader/upgrade_status_service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/aws/sonar-upgrader/python_upgrader/upgrade_status_service.py b/modules/aws/sonar-upgrader/python_upgrader/upgrade_status_service.py index ed66e0771..e365deced 100644 --- a/modules/aws/sonar-upgrader/python_upgrader/upgrade_status_service.py +++ b/modules/aws/sonar-upgrader/python_upgrader/upgrade_status_service.py @@ -174,7 +174,7 @@ def get_summary(self): for host in upgrade_statuses.keys(): padded_host = "{:<45}".format(host) optional_message = upgrade_statuses.get(host).get('message') - summary += f"\n {padded_host}: {upgrade_statuses.get(host).get('status')}" + summary += f"\n {padded_host}: {upgrade_statuses.get(host).get('status').value}" if optional_message is not None: summary += f". 
Message: {optional_message}" return summary From 557a2dc8d98fa7e5ff10a107fc3f029361d18267 Mon Sep 17 00:00:00 2001 From: Linda Nasredin Date: Tue, 17 Oct 2023 15:12:53 +0300 Subject: [PATCH 07/13] Sonar upgrade - added tarball_location variable to example (#286) * Sonar upgrade - added tarball_location variable to example * Sonar upgrade - added tarball_location variable to example --- examples/sonar_upgrade/main.tf | 6 ------ examples/sonar_upgrade/variables.tf | 9 +++++++++ 2 files changed, 9 insertions(+), 6 deletions(-) create mode 100644 examples/sonar_upgrade/variables.tf diff --git a/examples/sonar_upgrade/main.tf b/examples/sonar_upgrade/main.tf index 5caae6552..633f9c5bd 100644 --- a/examples/sonar_upgrade/main.tf +++ b/examples/sonar_upgrade/main.tf @@ -83,10 +83,4 @@ module "sonar_upgrader" { run_postflight_validations = true stop_on_failure = true - # Fill if using your S3 bucket, remove if using Imperva's S3 bucket - tarball_location = { - "s3_bucket" = "myBucket" - "s3_region" = "us-east-1" - "s3_key" = "prefix/jsonar-x.y.z.w.u.tar.gz" - } } diff --git a/examples/sonar_upgrade/variables.tf b/examples/sonar_upgrade/variables.tf new file mode 100644 index 000000000..d2c9ef190 --- /dev/null +++ b/examples/sonar_upgrade/variables.tf @@ -0,0 +1,9 @@ +variable "tarball_location" { + type = object({ + s3_bucket = string + s3_region = string + s3_key = string + }) + description = "S3 bucket location of the DSF installation software. s3_key is the full path to the tarball file within the bucket, for example, 'prefix/jsonar-x.y.z.w.u.tar.gz'" + default = null +} From 28ec73283187da9fa28aad7cda3509971fa1aac6 Mon Sep 17 00:00:00 2001 From: "linda.nasredin" Date: Tue, 17 Oct 2023 16:42:51 +0300 Subject: [PATCH 08/13] Updated README links for DSF 4.13/14.13, but not for Sonar since can't find the link yet [skip actions] --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 9b65e9a2d..3ff883807 100644 --- a/README.md +++ b/README.md @@ -102,9 +102,9 @@ This guide references the following information and links, some of which are ava Sonar v4.12 - DAM v14.12 + DAM v14.13 - DRA v4.12 + DRA v4.13 DSF Components Overview From a6becad0cddbb20b43d0f7e12b87d4c34f24b7a6 Mon Sep 17 00:00:00 2001 From: eytannnaim <80354890+eytannnaim@users.noreply.github.com> Date: Thu, 19 Oct 2023 12:20:00 +0300 Subject: [PATCH 09/13] Azure test (#287) --- .github/workflows/dsf_poc_cli.yml | 1 - .github/workflows/dsf_poc_cli_azure.yml | 214 ++++++++++++++++++++++++ .github/workflows/nightly_manager.yml | 22 +++ .github/workflows/sonar_poc_cli.yml | 2 +- 4 files changed, 237 insertions(+), 2 deletions(-) create mode 100644 .github/workflows/dsf_poc_cli_azure.yml diff --git a/.github/workflows/dsf_poc_cli.yml b/.github/workflows/dsf_poc_cli.yml index 4abbe6690..197e8bbff 100644 --- a/.github/workflows/dsf_poc_cli.yml +++ b/.github/workflows/dsf_poc_cli.yml @@ -28,7 +28,6 @@ on: env: TF_CLI_ARGS: "-no-color" TF_INPUT: 0 - TF_VAR_gw_count: 2 AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} diff --git a/.github/workflows/dsf_poc_cli_azure.yml b/.github/workflows/dsf_poc_cli_azure.yml new file mode 100644 index 000000000..50ae2b1b1 --- /dev/null +++ b/.github/workflows/dsf_poc_cli_azure.yml @@ -0,0 +1,214 @@ +name: 'DSF POC CLI - Azure' + +on: + workflow_call: + inputs: + use_modules_from_terraform_registry: + required: true + type: boolean + explicit_ref: + required: true + type: string + secrets: + 
AWS_ACCESS_KEY_ID: + required: true + AWS_SECRET_ACCESS_KEY: + required: true + SLACK_WEBHOOK_URL: + required: true + ARM_CLIENT_SECRET: + required: true + + workflow_dispatch: + inputs: + use_modules_from_terraform_registry: + type: boolean + required: false + +env: + TF_CLI_ARGS: "-no-color" + TF_INPUT: 0 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} # aws creds are needed for s3 backend + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + ARM_SUBSCRIPTION_ID: ${{ vars.ARM_SUBSCRIPTION_ID }} + ARM_CLIENT_ID: ${{ vars.ARM_CLIENT_ID }} + ARM_TENANT_ID: ${{ vars.ARM_TENANT_ID }} + ARM_CLIENT_SECRET: ${{ secrets.ARM_CLIENT_SECRET }} +permissions: + contents: read + +jobs: + terraform: + strategy: + fail-fast: false + matrix: + include: + - name: DSF POC - SONAR + workspace: azure_cli- + + name: '${{ matrix.name }}' + runs-on: ubuntu-latest + env: + EXAMPLE_DIR: ./examples/azure/dsf_deployment + environment: test + + # Use the Bash shell regardless whether the GitHub Actions runner is ubuntu-latest, macos-latest, or windows-latest + defaults: + run: + shell: bash + steps: + + - name: Pick ref + run: | + if [ -z "${{ inputs.explicit_ref }}" ]; then + echo REF=${{ github.ref }} >> $GITHUB_ENV; + else + echo REF=${{ inputs.explicit_ref }} >> $GITHUB_ENV; + fi + + - name: Set Workspace Name + run: | + echo "Event Name: ${{ github.event_name }}" + if [ ${{ github.event_name }} == 'schedule' ]; then + echo TF_WORKSPACE=${{ matrix.workspace }}${{ github.event_name }}-$REF >> $GITHUB_ENV + else + echo TF_WORKSPACE=${{ matrix.workspace }}${{ github.run_number }}-${{ github.run_attempt }}-${{ inputs.explicit_ref }} >> $GITHUB_ENV + echo TMP_WORKSPACE_NAME=${{ matrix.workspace }}${{ github.run_number }}-${{ github.run_attempt }}-${{ inputs.explicit_ref }} >> $GITHUB_ENV + fi + + # Checkout the repository to the GitHub Actions runner + - name: Checkout + uses: actions/checkout@v3 + with: + ref: ${{ env.REF }} + + - name: Change the modules source to local + if: ${{ inputs.use_modules_from_terraform_registry == false }} + run: | + find $EXAMPLE_DIR -type f -exec sed -i -f sed.expr {} \; + + - name: Sets env vars for environment + run: | + echo "TF_VAR_tarball_s3_bucket=0ed58e18-0c0c-11ed-861d-0242ac120003" >> $GITHUB_ENV + if: github.ref != 'refs/heads/"master"' + + - name: Create terraform backend file + run: | + cat << EOF > $EXAMPLE_DIR/backend.tf + terraform { + backend "s3" { + bucket = "terraform-state-bucket-dsfkit-github-tests" + key = "states/terraform.tfstate" + dynamodb_table = "terraform-state-lock" + region = "us-east-1" + } + } + EOF + - name: Create terraform.tfvars file + run: | + cat << EOF > $EXAMPLE_DIR/terraform.tfvars + resource_group_location = "East US" + tarball_location = { + az_resource_group = "eytan-resource-group" + az_storage_account = "eytanstorageaccount" + az_container = "sonar" + az_blob = "jsonar-4.12.0.10.0.tar.gz" + } + EOF + + # Install the latest version of Terraform CLI and configure the Terraform CLI configuration file with a Terraform Cloud user API token + - name: Setup Terraform + uses: hashicorp/setup-terraform@v2 + with: + terraform_wrapper: false + terraform_version: ~1.6.0 + + - name: Setup jq + uses: sergeysova/jq-action@v2 + + # Initialize a new or existing Terraform working directory by creating initial files, loading any remote state, downloading modules, etc. 
+ - name: Terraform Init + run: terraform -chdir=$EXAMPLE_DIR init + env: + TF_WORKSPACE: default + + - name: Cleaning environment + run: | + if [ ${{ github.event_name }} == 'schedule' ]; then + mv $EXAMPLE_DIR/main.tf{,_} + mv $EXAMPLE_DIR/outputs.tf{,_} + mv $EXAMPLE_DIR/sonar.tf{,_} + mv $EXAMPLE_DIR/networking.tf{,_} + terraform -chdir=$EXAMPLE_DIR destroy -auto-approve + mv $EXAMPLE_DIR/main.tf{_,} + mv $EXAMPLE_DIR/outputs.tf{_,} + mv $EXAMPLE_DIR/sonar.tf{_,} + mv $EXAMPLE_DIR/networking.tf{_,} + fi + + - name: Terraform Validate + run: terraform -chdir=$EXAMPLE_DIR validate + + # Generates an execution plan for Terraform + - name: Terraform Plan + run: | + terraform -chdir=$EXAMPLE_DIR workspace list + terraform -chdir=$EXAMPLE_DIR plan + + # On push to "main", build or change infrastructure according to Terraform configuration files + # Note: It is recommended to set up a required "strict" status check in your repository for "Terraform Cloud". See the documentation on "strict" required status checks for more information: https://help.github.com/en/github/administering-a-repository/types-of-required-status-checks + - name: Terraform Apply + id: apply + # if: github.ref == 'refs/heads/"master"' && github.event_name == 'push' || github.event_name == 'workflow_dispatch' + run: terraform -chdir=$EXAMPLE_DIR apply -auto-approve + + - name: Terraform Output + if: always() + run: terraform -chdir=$EXAMPLE_DIR output -json + + - name: Collect Artifacts + if: always() + uses: actions/upload-artifact@v2 + with: + name: collected-keys + path: | + ${{ env.EXAMPLE_DIR }}/ssh_keys + + - name: Terraform Destroy + id: destroy + if: always() + run: | + if [ '${{ steps.apply.conclusion }}' == 'success' ] || [ ${{ github.event_name }} != 'schedule' ]; then + terraform -chdir=$EXAMPLE_DIR destroy -auto-approve + fi + + - name: Terraform Delete Workspace + if: always() + run: | + if [ '${{ steps.destroy.conclusion }}' == 'success' ] && [ ${{ github.event_name }} != 'schedule' ]; then + terraform -chdir=$EXAMPLE_DIR workspace delete $TMP_WORKSPACE_NAME + fi + env: + TF_WORKSPACE: default + + - name: Check how was the workflow run + id: check-trigger + if: ${{ failure() }} + run: | + if [ "${{ github.event_name }}" == "schedule" ]; then + echo "run-by=Automation" >> $GITHUB_OUTPUT + else + echo "run-by=${{ github.actor }}" >> $GITHUB_OUTPUT + fi + + # Send job failure to Slack + - name: Send Slack When Failure + run: | + if [ ${{ env.REF }} == 'master' ]; then + curl -X POST -H 'Content-type: application/json' --data '{"text":":exclamation: :exclamation: :exclamation:\n*${{ matrix.name }} Prod ${{ inputs.workspace }} automation Failed*\n\nRun by: ${{ steps.check-trigger.outputs.run-by }}", "channel": "#dsfkit-prod"}' ${{ secrets.SLACK_WEBHOOK_URL }} + elif [ ${{ env.REF }} == 'dev' ]; then + curl -X POST -H 'Content-type: application/json' --data '{"text":":exclamation: :exclamation: :exclamation:\n*${{ matrix.name }} dev ${{ inputs.workspace }} automation Failed*\n\nRun by: ${{ steps.check-trigger.outputs.run-by }}", "channel": "#edsf_automation"}' ${{ secrets.SLACK_WEBHOOK_URL }} + else + curl -X POST -H 'Content-type: application/json' --data '{"text":":exclamation: :exclamation: :exclamation:\n*${{ matrix.name }} private branch ${{ inputs.workspace }} automation Failed*\n\nRun by: ${{ steps.check-trigger.outputs.run-by }}", "channel": "#edsf_automation"}' ${{ secrets.SLACK_WEBHOOK_URL }} + fi + if: ${{ failure() }} diff --git a/.github/workflows/nightly_manager.yml 
b/.github/workflows/nightly_manager.yml index 43958bd90..1a1a43c4e 100644 --- a/.github/workflows/nightly_manager.yml +++ b/.github/workflows/nightly_manager.yml @@ -121,3 +121,25 @@ jobs: AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} DAM_LICENSE: ${{ secrets.DAM_LICENSE }} + + master_dsf_poc_azure: + uses: imperva/dsfkit/.github/workflows/dsf_poc_cli_azure.yml@master + with: + use_modules_from_terraform_registry: false + explicit_ref: master + secrets: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + ARM_CLIENT_SECRET: ${{ secrets.ARM_CLIENT_SECRET }} + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} + + dev_dsf_poc_azure: + uses: imperva/dsfkit/.github/workflows/dsf_poc_cli_azure.yml@dev + with: + use_modules_from_terraform_registry: false + explicit_ref: dev + secrets: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + ARM_CLIENT_SECRET: ${{ secrets.ARM_CLIENT_SECRET }} + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} diff --git a/.github/workflows/sonar_poc_cli.yml b/.github/workflows/sonar_poc_cli.yml index 110a5de2c..dad830742 100644 --- a/.github/workflows/sonar_poc_cli.yml +++ b/.github/workflows/sonar_poc_cli.yml @@ -119,7 +119,7 @@ jobs: - name: Change the modules source to local if: ${{ inputs.use_modules_from_terraform_registry == false }} run: | - find ./examples/ -type f -exec sed -i -f sed.expr {} \; + find $EXAMPLE_DIR -type f -exec sed -i -f sed.expr {} \; - name: Sets env vars for environment run: | From 60eb1cb9e49d9e2bb81a14ab9f11567d25a433ce Mon Sep 17 00:00:00 2001 From: Sivan Hajbi <117824107+sivan-hajbi-imperva@users.noreply.github.com> Date: Sun, 22 Oct 2023 14:16:11 +0300 Subject: [PATCH 10/13] handle remote executor exceptions (#288) --- .../sonar-upgrader/python_upgrader/main.py | 113 +++++++++++------- 1 file changed, 72 insertions(+), 41 deletions(-) diff --git a/modules/aws/sonar-upgrader/python_upgrader/main.py b/modules/aws/sonar-upgrader/python_upgrader/main.py index 03f63ca4f..e42ddda4a 100644 --- a/modules/aws/sonar-upgrader/python_upgrader/main.py +++ b/modules/aws/sonar-upgrader/python_upgrader/main.py @@ -420,20 +420,28 @@ def run_preflight_validations_for_extended_node(extended_node, target_version, s else: return False - upgrade_status_service.update_upgrade_status(extended_node.get('dsf_node_id'), - UpgradeStatus.RUNNING_PREFLIGHT_VALIDATIONS) - preflight_validations_result = run_preflight_validations(extended_node.get('dsf_node'), - extended_node.get('dsf_node_name'), target_version, - script_file_name, python_location) - if are_preflight_validations_passed(preflight_validations_result): - print(f"### Preflight validations passed for {extended_node.get('dsf_node_name')}") + error_message = None + try: upgrade_status_service.update_upgrade_status(extended_node.get('dsf_node_id'), - UpgradeStatus.PREFLIGHT_VALIDATIONS_SUCCEEDED) - else: - print(f"### Preflight validations didn't pass for {extended_node.get('dsf_node_name')}") + UpgradeStatus.RUNNING_PREFLIGHT_VALIDATIONS) + preflight_validations_result = run_preflight_validations(extended_node.get('dsf_node'), + extended_node.get('dsf_node_name'), target_version, + script_file_name, python_location) + if are_preflight_validations_passed(preflight_validations_result): + print(f"### Preflight validations passed for {extended_node.get('dsf_node_name')}") + 
upgrade_status_service.update_upgrade_status(extended_node.get('dsf_node_id'), + UpgradeStatus.PREFLIGHT_VALIDATIONS_SUCCEEDED) + else: + print(f"### Preflight validations didn't pass for {extended_node.get('dsf_node_name')}") + error_message = preflight_validations_result + except Exception as ex: + print(f"### Preflight validations for {extended_node.get('dsf_node_name')} failed with exception: {str(ex)}") + error_message = str(ex) + + if error_message is not None: upgrade_status_service.update_upgrade_status(extended_node.get('dsf_node_id'), UpgradeStatus.PREFLIGHT_VALIDATIONS_FAILED, - preflight_validations_result) + error_message) if stop_on_failure: raise UpgradeException(f"Preflight validations didn't pass for {extended_node.get('dsf_node_id')}") else: @@ -634,21 +642,33 @@ def upgrade_dsf_node(extended_node, target_version, upgrade_script_file_name, st print(f"Running upgrade for {extended_node.get('dsf_node_name')}") print(f"You may follow the upgrade process in the DSF node by running SSH to it and looking at " f"/var/log/upgrade.log. When the DSF node's upgrade will complete, this log will also appear here.") - upgrade_status_service.update_upgrade_status(extended_node.get('dsf_node_id'), - UpgradeStatus.RUNNING_UPGRADE) - success, script_output = run_upgrade_script(extended_node.get('dsf_node'), target_version, tarball_location, - upgrade_script_file_name) - if success: - print(f"Upgrading {extended_node.get('dsf_node_name')} was ### successful ###") + + error_message = None + try: upgrade_status_service.update_upgrade_status(extended_node.get('dsf_node_id'), - UpgradeStatus.UPGRADE_SUCCEEDED) - else: - print(f"Upgrading {extended_node.get('dsf_node_name')} ### failed ### ") + UpgradeStatus.RUNNING_UPGRADE) + success, script_output = run_upgrade_script(extended_node.get('dsf_node'), target_version, tarball_location, + upgrade_script_file_name) + if success: + print(f"Upgrading {extended_node.get('dsf_node_name')} was ### successful ###") + upgrade_status_service.update_upgrade_status(extended_node.get('dsf_node_id'), + UpgradeStatus.UPGRADE_SUCCEEDED) + else: + print(f"Upgrading {extended_node.get('dsf_node_name')} ### failed ### ") + error_message = script_output + except Exception as ex: + print(f"Upgrading {extended_node.get('dsf_node_name')} ### failed ### with exception: {str(ex)}") + error_message = str(ex) + + if error_message is not None: upgrade_status_service.update_upgrade_status(extended_node.get('dsf_node_id'), - UpgradeStatus.UPGRADE_FAILED, script_output) + UpgradeStatus.UPGRADE_FAILED, error_message) if stop_on_failure: - raise UpgradeException(f"Upgrading {extended_node.get('dsf_node_name')} ### failed ### ") - return success + raise UpgradeException(f"Upgrading {extended_node.get('dsf_node_name')} ### failed ###") + else: + return False + + return True def run_upgrade_script(dsf_node, target_version, tarball_location, upgrade_script_file_name): @@ -709,29 +729,40 @@ def run_postflight_validations(extended_node, target_version, script_file_name, print(f"Running postflight validations for {extended_node.get('dsf_node_name')}") print(f"Python location (taken from dictionary) in {extended_node.get('dsf_node_name')} is {python_location}") - upgrade_status_service.update_upgrade_status(extended_node.get('dsf_node_id'), - UpgradeStatus.RUNNING_POSTFLIGHT_VALIDATIONS) - postflight_validations_result_json = run_postflight_validations_script(extended_node.get('dsf_node'), - target_version, python_location, - script_file_name) - postflight_validations_result = 
json.loads(postflight_validations_result_json) - print(f"Postflight validations result in {extended_node.get('dsf_node_name')} is {postflight_validations_result}") - - passed = are_postflight_validations_passed(postflight_validations_result) - if passed: - print(f"### Postflight validations passed for {extended_node.get('dsf_node_name')}") - upgrade_status_service.update_upgrade_status(extended_node.get('dsf_node_id'), - UpgradeStatus.POSTFLIGHT_VALIDATIONS_SUCCEEDED) + error_message = None + try: upgrade_status_service.update_upgrade_status(extended_node.get('dsf_node_id'), - UpgradeStatus.SUCCEEDED) - else: - print(f"### Postflight validations didn't pass for {extended_node.get('dsf_node_name')}") + UpgradeStatus.RUNNING_POSTFLIGHT_VALIDATIONS) + postflight_validations_result_json = run_postflight_validations_script(extended_node.get('dsf_node'), + target_version, python_location, + script_file_name) + postflight_validations_result = json.loads(postflight_validations_result_json) + print(f"Postflight validations result in {extended_node.get('dsf_node_name')} is {postflight_validations_result}") + + passed = are_postflight_validations_passed(postflight_validations_result) + if passed: + print(f"### Postflight validations passed for {extended_node.get('dsf_node_name')}") + upgrade_status_service.update_upgrade_status(extended_node.get('dsf_node_id'), + UpgradeStatus.POSTFLIGHT_VALIDATIONS_SUCCEEDED) + upgrade_status_service.update_upgrade_status(extended_node.get('dsf_node_id'), + UpgradeStatus.SUCCEEDED) + else: + print(f"### Postflight validations didn't pass for {extended_node.get('dsf_node_name')}") + error_message = postflight_validations_result + except Exception as ex: + print(f"### Postflight validations for {extended_node.get('dsf_node_name')} failed with exception: {str(ex)}") + error_message = str(ex) + + if error_message is not None: upgrade_status_service.update_upgrade_status(extended_node.get('dsf_node_id'), UpgradeStatus.POSTFLIGHT_VALIDATIONS_FAILED, - postflight_validations_result) + error_message) if stop_on_failure: raise UpgradeException(f"Postflight validations didn't pass for {extended_node.get('dsf_node_id')}") - return passed + else: + return False + + return True def run_postflight_validations_script(dsf_node, target_version, python_location, script_file_name): From 024bc79a6a864ef528ff0a75b74929dcd1e9b28d Mon Sep 17 00:00:00 2001 From: Eytan Naim Date: Mon, 23 Oct 2023 09:15:40 +0300 Subject: [PATCH 11/13] New usage stats variable --- modules/aws/agent-gw/main.tf | 1 + modules/aws/agent-gw/variables.tf | 6 ++++++ modules/aws/agentless-gw/main.tf | 1 + modules/aws/agentless-gw/variables.tf | 6 ++++++ modules/aws/dam-base-instance/userdata.tf | 5 ++++- modules/aws/dam-base-instance/variables.tf | 5 +++++ modules/aws/dra-admin/main.tf | 2 ++ modules/aws/dra-admin/variables.tf | 6 ++++++ modules/aws/dra-analytics/main.tf | 5 ++++- modules/aws/dra-analytics/variables.tf | 6 ++++++ modules/aws/hub/main.tf | 1 + modules/aws/hub/variables.tf | 6 ++++++ modules/aws/mx/main.tf | 1 + modules/aws/mx/variables.tf | 6 ++++++ modules/aws/sonar-base-instance/userdata.tf | 5 ++++- modules/aws/sonar-base-instance/variables.tf | 5 +++++ modules/azurerm/agentless-gw/main.tf | 1 + modules/azurerm/agentless-gw/variables.tf | 6 ++++++ modules/azurerm/hub/main.tf | 1 + modules/azurerm/hub/variables.tf | 5 +++++ modules/azurerm/sonar-base-instance/userdata.tf | 5 ++++- modules/azurerm/sonar-base-instance/variables.tf | 4 ++++ modules/null/statistics/main.tf | 4 ++-- 23 files changed, 87 
insertions(+), 6 deletions(-) diff --git a/modules/aws/agent-gw/main.tf b/modules/aws/agent-gw/main.tf index e019b1d08..cbd4c4640 100644 --- a/modules/aws/agent-gw/main.tf +++ b/modules/aws/agent-gw/main.tf @@ -94,4 +94,5 @@ module "agent_gw" { } attach_persistent_public_ip = false tags = var.tags + send_usage_statistics = var.send_usage_statistics } diff --git a/modules/aws/agent-gw/variables.tf b/modules/aws/agent-gw/variables.tf index 674b56bb9..572d7f03f 100644 --- a/modules/aws/agent-gw/variables.tf +++ b/modules/aws/agent-gw/variables.tf @@ -272,3 +272,9 @@ variable "tags" { type = map(string) default = {} } + +variable "send_usage_statistics" { + type = bool + default = true + description = "Set to true to send usage statistics." +} diff --git a/modules/aws/agentless-gw/main.tf b/modules/aws/agentless-gw/main.tf index 017491bd3..941484026 100644 --- a/modules/aws/agentless-gw/main.tf +++ b/modules/aws/agentless-gw/main.tf @@ -60,4 +60,5 @@ module "gw_instance" { volume_attachment_device_name = var.volume_attachment_device_name tags = var.tags base_directory = var.base_directory + send_usage_statistics = var.send_usage_statistics } diff --git a/modules/aws/agentless-gw/variables.tf b/modules/aws/agentless-gw/variables.tf index 79ef2d78a..47457a4c0 100644 --- a/modules/aws/agentless-gw/variables.tf +++ b/modules/aws/agentless-gw/variables.tf @@ -243,3 +243,9 @@ variable "base_directory" { default = "/imperva" description = "The base directory where all Sonar related directories will be installed" } + +variable "send_usage_statistics" { + type = bool + default = true + description = "Set to true to send usage statistics." +} diff --git a/modules/aws/dam-base-instance/userdata.tf b/modules/aws/dam-base-instance/userdata.tf index 639583584..c5654939b 100644 --- a/modules/aws/dam-base-instance/userdata.tf +++ b/modules/aws/dam-base-instance/userdata.tf @@ -33,6 +33,8 @@ data "aws_region" "current" {} module "statistics" { source = "../../../modules/aws/statistics" + count = var.send_usage_statistics ? 1 : 0 + deployment_name = var.name product = "DAM" resource_type = var.resource_type @@ -77,8 +79,9 @@ resource "null_resource" "readiness" { module "statistics_success" { source = "../../../modules/aws/statistics" + count = var.send_usage_statistics ? 1 : 0 - id = module.statistics.id + id = module.statistics[0].id status = "success" depends_on = [null_resource.readiness] } diff --git a/modules/aws/dam-base-instance/variables.tf b/modules/aws/dam-base-instance/variables.tf index afa709c62..a3e3152cf 100644 --- a/modules/aws/dam-base-instance/variables.tf +++ b/modules/aws/dam-base-instance/variables.tf @@ -189,3 +189,8 @@ variable "tags" { type = map(string) default = {} } + +variable "send_usage_statistics" { + type = bool + description = "Set to true to send usage statistics." +} diff --git a/modules/aws/dra-admin/main.tf b/modules/aws/dra-admin/main.tf index 5d47da33f..b23818986 100644 --- a/modules/aws/dra-admin/main.tf +++ b/modules/aws/dra-admin/main.tf @@ -60,6 +60,8 @@ resource "aws_network_interface" "eni" { module "statistics" { source = "../../../modules/aws/statistics" + count = var.send_usage_statistics ? 
1 : 0 + deployment_name = var.friendly_name product = "DRA" resource_type = "dra-admin" diff --git a/modules/aws/dra-admin/variables.tf b/modules/aws/dra-admin/variables.tf index d659c7edc..3d801d783 100644 --- a/modules/aws/dra-admin/variables.tf +++ b/modules/aws/dra-admin/variables.tf @@ -188,3 +188,9 @@ variable "instance_profile_name" { default = null description = "Instance profile to assign to the instance. Keep empty if you wish to create a new IAM role and profile" } + +variable "send_usage_statistics" { + type = bool + default = true + description = "Set to true to send usage statistics." +} diff --git a/modules/aws/dra-analytics/main.tf b/modules/aws/dra-analytics/main.tf index b1caabc2c..526a2657f 100644 --- a/modules/aws/dra-analytics/main.tf +++ b/modules/aws/dra-analytics/main.tf @@ -57,6 +57,8 @@ resource "aws_network_interface" "eni" { module "statistics" { source = "../../../modules/aws/statistics" + count = var.send_usage_statistics ? 1 : 0 + deployment_name = var.friendly_name product = "DRA" resource_type = "dra-analytics" @@ -76,8 +78,9 @@ resource "null_resource" "readiness" { module "statistics_success" { source = "../../../modules/aws/statistics" + count = var.send_usage_statistics ? 1 : 0 - id = module.statistics.id + id = module.statistics[0].id status = "success" depends_on = [null_resource.readiness] } diff --git a/modules/aws/dra-analytics/variables.tf b/modules/aws/dra-analytics/variables.tf index 9003c8a77..f31b8c216 100644 --- a/modules/aws/dra-analytics/variables.tf +++ b/modules/aws/dra-analytics/variables.tf @@ -203,3 +203,9 @@ variable "instance_profile_name" { default = null description = "Instance profile to assign to the instance. Keep empty if you wish to create a new IAM role and profile" } + +variable "send_usage_statistics" { + type = bool + default = true + description = "Set to true to send usage statistics." +} diff --git a/modules/aws/hub/main.tf b/modules/aws/hub/main.tf index 0e223a6f0..bb5340467 100644 --- a/modules/aws/hub/main.tf +++ b/modules/aws/hub/main.tf @@ -69,4 +69,5 @@ module "hub_instance" { volume_attachment_device_name = var.volume_attachment_device_name tags = var.tags base_directory = var.base_directory + send_usage_statistics = var.send_usage_statistics } diff --git a/modules/aws/hub/variables.tf b/modules/aws/hub/variables.tf index c873d1f3d..66ab3face 100644 --- a/modules/aws/hub/variables.tf +++ b/modules/aws/hub/variables.tf @@ -292,3 +292,9 @@ variable "base_directory" { default = "/imperva" description = "The base directory where all Sonar related directories will be installed" } + +variable "send_usage_statistics" { + type = bool + default = true + description = "Set to true to send usage statistics." +} diff --git a/modules/aws/mx/main.tf b/modules/aws/mx/main.tf index 61fc33a09..68e8ec2c1 100644 --- a/modules/aws/mx/main.tf +++ b/modules/aws/mx/main.tf @@ -94,4 +94,5 @@ module "mx" { timeout = local.timeout } tags = var.tags + send_usage_statistics = var.send_usage_statistics } diff --git a/modules/aws/mx/variables.tf b/modules/aws/mx/variables.tf index 3f4fde850..79d9179e9 100644 --- a/modules/aws/mx/variables.tf +++ b/modules/aws/mx/variables.tf @@ -267,3 +267,9 @@ variable "tags" { type = map(string) default = {} } + +variable "send_usage_statistics" { + type = bool + default = true + description = "Set to true to send usage statistics." 
+} diff --git a/modules/aws/sonar-base-instance/userdata.tf b/modules/aws/sonar-base-instance/userdata.tf index 95dd625cc..936fdc6cd 100644 --- a/modules/aws/sonar-base-instance/userdata.tf +++ b/modules/aws/sonar-base-instance/userdata.tf @@ -29,6 +29,8 @@ resource "random_uuid" "jsonar_uuid" {} module "statistics" { source = "../../../modules/aws/statistics" + count = var.send_usage_statistics ? 1 : 0 + deployment_name = var.name product = "SONAR" resource_type = var.resource_type @@ -75,8 +77,9 @@ resource "null_resource" "readiness" { module "statistics_success" { source = "../../../modules/aws/statistics" + count = var.send_usage_statistics ? 1 : 0 - id = module.statistics.id + id = module.statistics[0].id status = "success" depends_on = [null_resource.readiness] } diff --git a/modules/aws/sonar-base-instance/variables.tf b/modules/aws/sonar-base-instance/variables.tf index 658ee9c6c..8b22af22a 100644 --- a/modules/aws/sonar-base-instance/variables.tf +++ b/modules/aws/sonar-base-instance/variables.tf @@ -220,3 +220,8 @@ variable "base_directory" { default = "/imperva" description = "The base directory where all Sonar related directories will be installed" } + +variable "send_usage_statistics" { + type = bool + description = "Set to true to send usage statistics." +} diff --git a/modules/azurerm/agentless-gw/main.tf b/modules/azurerm/agentless-gw/main.tf index 1779212c0..036bf4fb2 100644 --- a/modules/azurerm/agentless-gw/main.tf +++ b/modules/azurerm/agentless-gw/main.tf @@ -58,4 +58,5 @@ module "gw_instance" { sonarw_private_key_secret_name = var.sonarw_private_key_secret_name sonarw_public_key_content = var.sonarw_public_key_content tags = var.tags + send_usage_statistics = var.send_usage_statistics } diff --git a/modules/azurerm/agentless-gw/variables.tf b/modules/azurerm/agentless-gw/variables.tf index 148ee608b..69bfca39e 100644 --- a/modules/azurerm/agentless-gw/variables.tf +++ b/modules/azurerm/agentless-gw/variables.tf @@ -223,3 +223,9 @@ variable "sonarw_public_key_content" { default = null description = "The Agentless Gateway sonarw user public key - used for remote Agentless Gateway federation, HADR, etc." } + +variable "send_usage_statistics" { + type = bool + default = true + description = "Set to true to send usage statistics." +} diff --git a/modules/azurerm/hub/main.tf b/modules/azurerm/hub/main.tf index 79e4dc912..36fc475f9 100644 --- a/modules/azurerm/hub/main.tf +++ b/modules/azurerm/hub/main.tf @@ -67,4 +67,5 @@ module "hub_instance" { sonarw_private_key_secret_name = var.sonarw_private_key_secret_name sonarw_public_key_content = var.sonarw_public_key_content tags = var.tags + send_usage_statistics = var.send_usage_statistics } \ No newline at end of file diff --git a/modules/azurerm/hub/variables.tf b/modules/azurerm/hub/variables.tf index 31e90b403..9b4b9f2b2 100644 --- a/modules/azurerm/hub/variables.tf +++ b/modules/azurerm/hub/variables.tf @@ -273,3 +273,8 @@ variable "mx_details" { default = [] } +variable "send_usage_statistics" { + type = bool + default = true + description = "Set to true to send usage statistics." +} diff --git a/modules/azurerm/sonar-base-instance/userdata.tf b/modules/azurerm/sonar-base-instance/userdata.tf index be4552876..266e074d4 100644 --- a/modules/azurerm/sonar-base-instance/userdata.tf +++ b/modules/azurerm/sonar-base-instance/userdata.tf @@ -29,6 +29,8 @@ resource "random_uuid" "jsonar_uuid" {} module "statistics" { source = "../../../modules/azurerm/statistics" + count = var.send_usage_statistics ? 
1 : 0 + deployment_name = var.name product = "SONAR" resource_type = var.resource_type @@ -76,8 +78,9 @@ resource "null_resource" "readiness" { module "statistics_success" { source = "../../../modules/azurerm/statistics" + count = var.send_usage_statistics ? 1 : 0 - id = module.statistics.id + id = module.statistics[0].id status = "success" depends_on = [null_resource.readiness] } diff --git a/modules/azurerm/sonar-base-instance/variables.tf b/modules/azurerm/sonar-base-instance/variables.tf index 409205c43..05661bc46 100644 --- a/modules/azurerm/sonar-base-instance/variables.tf +++ b/modules/azurerm/sonar-base-instance/variables.tf @@ -202,3 +202,7 @@ variable "generate_access_tokens" { description = "Generate access tokens for connecting to USC / connect DAM to the DSF Hub" } +variable "send_usage_statistics" { + type = bool + description = "Set to true to send usage statistics." +} diff --git a/modules/null/statistics/main.tf b/modules/null/statistics/main.tf index 8133757f0..738c09e28 100644 --- a/modules/null/statistics/main.tf +++ b/modules/null/statistics/main.tf @@ -1,6 +1,6 @@ locals { - host = "https://hh5n8k57bh.execute-api.us-east-1.amazonaws.com" - resource = "dsfkit_usage_stats" + host = "https://etnoe723me.execute-api.us-east-1.amazonaws.com" + resource = "dsfkit_stats_dev" stage = "dev" url = join("/", [local.host, local.stage, local.resource]) header_value = base64decode("amxEU0NhRWwyTTc2NkZVWUtVTFhlNUxyRVpBYVR4akhHd0dGYkExYg==") From 5fbcb5a5e7dab597889081254391cb5c3ab73053 Mon Sep 17 00:00:00 2001 From: Eytan Naim Date: Mon, 23 Oct 2023 10:05:57 +0300 Subject: [PATCH 12/13] Azure example README --- examples/azure/dsf_deployment/README.md | 31 +++++++++++++++++++++++++ 1 file changed, 31 insertions(+) create mode 100644 examples/azure/dsf_deployment/README.md diff --git a/examples/azure/dsf_deployment/README.md b/examples/azure/dsf_deployment/README.md new file mode 100644 index 000000000..b63320301 --- /dev/null +++ b/examples/azure/dsf_deployment/README.md @@ -0,0 +1,31 @@ +# DSF Deployment example +[![GitHub tag](https://img.shields.io/github/v/tag/imperva/dsfkit.svg)](https://github.com/imperva/dsfkit/tags) + +This example provides DSF (Data Security Fabric) deployment with DSF Hub, and Agentless Gateways. + +## Modularity +The deployment is modular and allows users to deploy one or more of the following modules: + +1. New VPC +2. Sonar + - DSF Hub + - DSF Hub DR HADR (High Availability Disaster Recovery) node + - Agentless Gateways + - Agentless Gateways DR HADR (High Availability Disaster Recovery) nodes + +## Variables +Several variables in the `variables.tf` file are important for configuring the deployment. The following variables dictate the deployment content and should be paid more attention to: +- `enable_sonar`: Enable Sonar sub-product +- `agent_gw_count`: Number of Agent Gateways +- `hub_hadr`: Enable DSF Hub High Availability Disaster Recovery (HADR) +- `agentless_gw_hadr`: Enable Agentless Gateway High Availability Disaster Recovery (HADR) + +### Networking +- `subnet_ids`: IDs of the subnets for the deployment. If not specified, a new vpc is created. 
+ +## Default Example +To perform the default deployment, run the following command: + +```bash +terraform apply -auto-approve +``` \ No newline at end of file From 056c58c7c609bb24bf53077ad8dd939d0c90f42f Mon Sep 17 00:00:00 2001 From: Eytan Naim Date: Mon, 23 Oct 2023 10:13:30 +0300 Subject: [PATCH 13/13] Add stats variable to installation examples --- examples/installation/dsf_single_account_deployment/dam.tf | 2 ++ examples/installation/dsf_single_account_deployment/dra.tf | 2 ++ .../installation/dsf_single_account_deployment/sonar.tf | 4 ++++ .../installation/dsf_single_account_deployment/variables.tf | 6 ++++++ .../installation/sonar_multi_account_deployment/main.tf | 4 ++++ .../sonar_multi_account_deployment/variables.tf | 6 ++++++ .../installation/sonar_single_account_deployment/main.tf | 3 +++ .../sonar_single_account_deployment/variables.tf | 6 ++++++ 8 files changed, 33 insertions(+) diff --git a/examples/installation/dsf_single_account_deployment/dam.tf b/examples/installation/dsf_single_account_deployment/dam.tf index 58d5efbb7..b3ef2bfac 100644 --- a/examples/installation/dsf_single_account_deployment/dam.tf +++ b/examples/installation/dsf_single_account_deployment/dam.tf @@ -33,6 +33,7 @@ module "mx" { } : null large_scale_mode = var.large_scale_mode.mx tags = local.tags + send_usage_statistics = var.send_usage_statistics } module "agent_gw" { @@ -59,6 +60,7 @@ module "agent_gw" { large_scale_mode = var.large_scale_mode.agent_gw gateway_group_name = local.gateway_group_name tags = local.tags + send_usage_statistics = var.send_usage_statistics providers = { aws = aws.provider-2 } diff --git a/examples/installation/dsf_single_account_deployment/dra.tf b/examples/installation/dsf_single_account_deployment/dra.tf index 04c03ee9e..a4ed768e0 100644 --- a/examples/installation/dsf_single_account_deployment/dra.tf +++ b/examples/installation/dsf_single_account_deployment/dra.tf @@ -23,6 +23,7 @@ module "dra_admin" { key_pair = local.dra_admin_public_key_name instance_profile_name = var.dra_admin_instance_profile_name tags = local.tags + send_usage_statistics = var.send_usage_statistics } module "dra_analytics" { @@ -46,6 +47,7 @@ module "dra_analytics" { admin_server_private_ip = module.dra_admin[0].private_ip admin_server_public_ip = module.dra_admin[0].public_ip tags = local.tags + send_usage_statistics = var.send_usage_statistics providers = { aws = aws.provider-2 } diff --git a/examples/installation/dsf_single_account_deployment/sonar.tf b/examples/installation/dsf_single_account_deployment/sonar.tf index 805ee506e..754cb2197 100644 --- a/examples/installation/dsf_single_account_deployment/sonar.tf +++ b/examples/installation/dsf_single_account_deployment/sonar.tf @@ -52,6 +52,7 @@ module "hub_main" { }] : [] generate_access_tokens = true tags = local.tags + send_usage_statistics = var.send_usage_statistics providers = { aws = aws.provider-1 } @@ -96,6 +97,7 @@ module "hub_dr" { base_directory = var.sonar_machine_base_directory generate_access_tokens = true tags = local.tags + send_usage_statistics = var.send_usage_statistics providers = { aws = aws.provider-1 } @@ -160,6 +162,7 @@ module "agentless_gw_main" { instance_profile_name = var.agentless_gw_instance_profile_name base_directory = var.sonar_machine_base_directory tags = local.tags + send_usage_statistics = var.send_usage_statistics providers = { aws = aws.provider-2 } @@ -202,6 +205,7 @@ module "agentless_gw_dr" { instance_profile_name = var.agentless_gw_instance_profile_name base_directory = 
var.sonar_machine_base_directory tags = local.tags + send_usage_statistics = var.send_usage_statistics providers = { aws = aws.provider-2 } diff --git a/examples/installation/dsf_single_account_deployment/variables.tf b/examples/installation/dsf_single_account_deployment/variables.tf index 262bbdd0a..1b4bc2af5 100644 --- a/examples/installation/dsf_single_account_deployment/variables.tf +++ b/examples/installation/dsf_single_account_deployment/variables.tf @@ -620,3 +620,9 @@ variable "dra_analytics_instance_profile_name" { description = "Instance profile to assign to the DRA Analytics EC2. Keep empty if you wish to create a new instance profile." default = null } + +variable "send_usage_statistics" { + type = bool + default = true + description = "Set to true to enable sending usage statistics, or false to disable." +} diff --git a/examples/installation/sonar_multi_account_deployment/main.tf b/examples/installation/sonar_multi_account_deployment/main.tf index 99596349c..a678f90d4 100644 --- a/examples/installation/sonar_multi_account_deployment/main.tf +++ b/examples/installation/sonar_multi_account_deployment/main.tf @@ -136,6 +136,7 @@ module "hub_main" { instance_profile_name = var.hub_instance_profile_name base_directory = var.sonar_machine_base_directory tags = local.tags + send_usage_statistics = var.send_usage_statistics providers = { aws = aws.hub-main } @@ -176,6 +177,7 @@ module "hub_dr" { instance_profile_name = var.hub_instance_profile_name base_directory = var.sonar_machine_base_directory tags = local.tags + send_usage_statistics = var.send_usage_statistics providers = { aws = aws.hub-dr } @@ -214,6 +216,7 @@ module "agentless_gw_main" { instance_profile_name = var.gw_instance_profile_name base_directory = var.sonar_machine_base_directory tags = local.tags + send_usage_statistics = var.send_usage_statistics providers = { aws = aws.gw-main } @@ -255,6 +258,7 @@ module "agentless_gw_dr" { instance_profile_name = var.gw_instance_profile_name base_directory = var.sonar_machine_base_directory tags = local.tags + send_usage_statistics = var.send_usage_statistics providers = { aws = aws.gw-dr } diff --git a/examples/installation/sonar_multi_account_deployment/variables.tf b/examples/installation/sonar_multi_account_deployment/variables.tf index bbe92209e..3f5072c3c 100644 --- a/examples/installation/sonar_multi_account_deployment/variables.tf +++ b/examples/installation/sonar_multi_account_deployment/variables.tf @@ -346,3 +346,9 @@ variable "sonar_machine_base_directory" { default = "/imperva" description = "The base directory where all Sonar related directories will be installed" } + +variable "send_usage_statistics" { + type = bool + default = true + description = "Set to true to enable sending usage statistics, or false to disable." 
+} diff --git a/examples/installation/sonar_single_account_deployment/main.tf b/examples/installation/sonar_single_account_deployment/main.tf index 3f7e12b9b..6f8cf8d4b 100644 --- a/examples/installation/sonar_single_account_deployment/main.tf +++ b/examples/installation/sonar_single_account_deployment/main.tf @@ -100,6 +100,7 @@ module "hub_main" { instance_profile_name = var.hub_instance_profile_name base_directory = var.sonar_machine_base_directory tags = local.tags + send_usage_statistics = var.send_usage_statistics } module "hub_dr" { @@ -133,6 +134,7 @@ module "hub_dr" { instance_profile_name = var.hub_instance_profile_name base_directory = var.sonar_machine_base_directory tags = local.tags + send_usage_statistics = var.send_usage_statistics } module "agentless_gw" { @@ -167,6 +169,7 @@ module "agentless_gw" { instance_profile_name = var.gw_instance_profile_name base_directory = var.sonar_machine_base_directory tags = local.tags + send_usage_statistics = var.send_usage_statistics } module "hub_hadr" { diff --git a/examples/installation/sonar_single_account_deployment/variables.tf b/examples/installation/sonar_single_account_deployment/variables.tf index b5b449563..3b487d2c3 100644 --- a/examples/installation/sonar_single_account_deployment/variables.tf +++ b/examples/installation/sonar_single_account_deployment/variables.tf @@ -265,3 +265,9 @@ variable "sonar_machine_base_directory" { default = "/imperva" description = "The base directory where all Sonar related directories will be installed" } + +variable "send_usage_statistics" { + type = bool + default = true + description = "Set to true to enable sending usage statistics, or false to disable." +}
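
---

For context, the patches above depend on a simple marker protocol between the remote validation scripts and the upgrader orchestrator: run_preflight_validations.py prints a line beginning with "Preflight validations result: " followed by a JSON payload, and main.py extracts that payload and checks its flags (patch 05 adds the new enough_free_disk_space flag to that check). The sketch below is illustrative only and is not part of these patches; the standalone helpers and exact parsing are assumptions, while the marker string, the result keys, and the all-flags check come from patches 01, 04 and 05.

```python
# Illustrative sketch only, not part of the patch series above.
# The real extraction lives in main.py (extract_preflight_validations_result);
# this standalone version assumes a line-oriented search for the marker.
import json

RESULT_MARKER = "Preflight validations result: "  # part of the protocol, see patches 01-03

def extract_preflight_validations_result(script_output):
    # Find the marker line printed by run_preflight_validations.py and
    # parse the JSON payload that follows it.
    for line in script_output.splitlines():
        if line.startswith(RESULT_MARKER):
            return json.loads(line[len(RESULT_MARKER):])
    raise ValueError("Preflight validations result marker not found in script output")

def are_preflight_validations_passed(result):
    # Mirrors the check extended in patch 05: every validation flag,
    # including the new enough_free_disk_space flag, must be true.
    return bool(result.get("different_version")
                and result.get("min_version")
                and result.get("max_version_hop")
                and result.get("enough_free_disk_space"))
```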