From 77c510a8cd0bdfb06d884dad20cd28d971396137 Mon Sep 17 00:00:00 2001 From: "linda.nasredin" Date: Tue, 14 Nov 2023 18:12:38 +0200 Subject: [PATCH 01/12] EDSF-456 Fix release push to dev and master --- .github/workflows/release.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index ea0ddc7ca..aa1bd1446 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -66,6 +66,7 @@ jobs: with: fetch-depth: 0 ref: ${{ env.src_branch }} + token: ${{ secrets.PUSH_TO_PROTECTED_BRANCH }} - name: Setup Terraform uses: hashicorp/setup-terraform@v2 @@ -141,7 +142,7 @@ jobs: with: fetch-depth: 0 ref: ${{ env.dst_branch }} - token: ${{ secrets.PUSH_TO_OTHER_REPOS_TOKEN_ADMIN }} + token: ${{ secrets.PUSH_TO_PROTECTED_BRANCH }} - name: Merge run: | From a1f5202ff44f4462fb9f3857d604cc61d4dca2d2 Mon Sep 17 00:00:00 2001 From: "linda.nasredin" Date: Tue, 14 Nov 2023 19:35:42 +0200 Subject: [PATCH 02/12] EDSF-387 Block downgrade in preflight validations + fixes in version validations --- modules/aws/sonar-upgrader/README.md | 8 +- .../python_upgrader/tests/scripts/__init__.py | 0 .../scripts/test_run_preflight_validations.py | 124 ++++++++++++++++++ .../python_upgrader/tests/test_main.py | 4 +- .../python_upgrader/upgrade/main.py | 2 +- .../upgrade/scripts/__init__.py | 0 .../scripts/run_preflight_validations.py | 55 ++++---- .../python_upgrader/upgrade_status.tpl | 2 +- 8 files changed, 156 insertions(+), 39 deletions(-) create mode 100644 modules/aws/sonar-upgrader/python_upgrader/tests/scripts/__init__.py create mode 100644 modules/aws/sonar-upgrader/python_upgrader/tests/scripts/test_run_preflight_validations.py create mode 100644 modules/aws/sonar-upgrader/python_upgrader/upgrade/scripts/__init__.py diff --git a/modules/aws/sonar-upgrader/README.md b/modules/aws/sonar-upgrader/README.md index d6ce5c581..4f6e38858 100644 --- a/modules/aws/sonar-upgrader/README.md +++ b/modules/aws/sonar-upgrader/README.md @@ -50,10 +50,10 @@ The target version is the version you are upgrading to. #### Preflight validations: -1. The source and target versions are different. +1. The target version is higher than the source version. 2. The source version is 4.10 or higher. (eDSF Kit requirement) -3. The upgrade version hop is 1 or 2, e.g., upgrade from 4.10 to 4.12 is supported, and upgrade from 4.10 to 4.13 is not. (Sonar product requirement) -4. There are at least 20GB of free space in the /data directory. +3. The upgrade version hop is 2 or lower, e.g., upgrade from 4.10 to 4.12 is supported, and upgrade from 4.10 to 4.13 is not. (Sonar product requirement) +4. There are at least 20GB of free space in the _/data_ directory. 
#### Postflight validations: @@ -114,7 +114,7 @@ upgrade_status.json: }, "1.2.3.7": { "status": "Preflight validations failed", - "message": "{\"different_version\": false, \"min_version\": true, \"max_version_hop\": true}" + "message": "{\"higher_target_version\": false, \"min_version\": true, \"max_version_hop\": true}" } }, "target-version": "4.12.0.10.0", diff --git a/modules/aws/sonar-upgrader/python_upgrader/tests/scripts/__init__.py b/modules/aws/sonar-upgrader/python_upgrader/tests/scripts/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/modules/aws/sonar-upgrader/python_upgrader/tests/scripts/test_run_preflight_validations.py b/modules/aws/sonar-upgrader/python_upgrader/tests/scripts/test_run_preflight_validations.py new file mode 100644 index 000000000..e520c38ae --- /dev/null +++ b/modules/aws/sonar-upgrader/python_upgrader/tests/scripts/test_run_preflight_validations.py @@ -0,0 +1,124 @@ +# test_run_preflight_validations.py + +# import pytest +from upgrade.scripts.run_preflight_validations import validate_sonar_version + + +def test_validate_sonar_version_2_hop(): + # given + source_version = "4.10.0.0.0" + target_version = "4.12.0.0.0" + + # when + higher_target_version, min_version_validation_passed, max_version_hop_validation_passed = \ + validate_sonar_version(source_version, target_version) + + # then + assert higher_target_version == True + assert min_version_validation_passed == True + assert max_version_hop_validation_passed == True + + +def test_validate_sonar_version_patch(): + # given + source_version = "4.10.0.0.0" + target_version = "4.12.0.1.0" + + # when + higher_target_version, min_version_validation_passed, max_version_hop_validation_passed = \ + validate_sonar_version(source_version, target_version) + + # then + assert higher_target_version == True + assert min_version_validation_passed == True + assert max_version_hop_validation_passed == True + + +def test_validate_sonar_version_customer1(): + # given + source_version = "4.10.0.1.3" + target_version = "4.12.0.0.0" + + # when + higher_target_version, min_version_validation_passed, max_version_hop_validation_passed = \ + validate_sonar_version(source_version, target_version) + + # then + assert higher_target_version == True + assert min_version_validation_passed == True + assert max_version_hop_validation_passed == True + + +def test_validate_sonar_version_patch_downgrade(): + # given + source_version = "4.10.0.1.3" + target_version = "4.10.0.0.0" + + # when + higher_target_version, min_version_validation_passed, max_version_hop_validation_passed = \ + validate_sonar_version(source_version, target_version) + + # then + assert higher_target_version == False + assert min_version_validation_passed == True + assert max_version_hop_validation_passed == True + + +def test_validate_sonar_version_minor_downgrade(): + # given + source_version = "4.11.0.0.0" + target_version = "4.10.0.1.0" + + # when + higher_target_version, min_version_validation_passed, max_version_hop_validation_passed = \ + validate_sonar_version(source_version, target_version) + + # then + assert higher_target_version == False + assert min_version_validation_passed == True + assert max_version_hop_validation_passed == True + + +def test_validate_sonar_version_from_4_9(): + # given + source_version = "4.9.c_20221129220420" + target_version = "4.10.0.1.0" + + # when + higher_target_version, min_version_validation_passed, max_version_hop_validation_passed = \ + validate_sonar_version(source_version, target_version) + + # then + 
assert higher_target_version == True + assert min_version_validation_passed == False + assert max_version_hop_validation_passed == True + + +def test_validate_sonar_version_3_hop(): + # given + source_version = "4.10.0.0.0" + target_version = "4.13.0.10.0" + + # when + higher_target_version, min_version_validation_passed, max_version_hop_validation_passed = \ + validate_sonar_version(source_version, target_version) + + # then + assert higher_target_version == True + assert min_version_validation_passed == True + assert max_version_hop_validation_passed == False + + +def test_validate_sonar_version_downgrade_and_lower(): + # given + source_version = "4.9.c_20221129220420" + target_version = "4.8.0" + + # when + higher_target_version, min_version_validation_passed, max_version_hop_validation_passed = \ + validate_sonar_version(source_version, target_version) + + # then + assert higher_target_version == False + assert min_version_validation_passed == False + assert max_version_hop_validation_passed == True diff --git a/modules/aws/sonar-upgrader/python_upgrader/tests/test_main.py b/modules/aws/sonar-upgrader/python_upgrader/tests/test_main.py index 19f806a6e..36c936b71 100644 --- a/modules/aws/sonar-upgrader/python_upgrader/tests/test_main.py +++ b/modules/aws/sonar-upgrader/python_upgrader/tests/test_main.py @@ -477,10 +477,10 @@ def mocked_run_remote_script(host, remote_user, remote_key_filename, script_cont if preflight_validations_error_hosts is not None and host in preflight_validations_error_hosts: return "run_preflight_validations error" elif preflight_validations_not_pass_hosts is not None and host in preflight_validations_not_pass_hosts: - return 'Preflight validations result: {"different_version": true, "min_version": true, ' \ + return 'Preflight validations result: {"higher_target_version": true, "min_version": true, ' \ '"max_version_hop": true, "enough_free_disk_space": false}' else: - return 'Preflight validations result: {"different_version": true, "min_version": true, ' \ + return 'Preflight validations result: {"higher_target_version": true, "min_version": true, ' \ '"max_version_hop": true, "enough_free_disk_space": true}' elif "upgrade_v4_10.sh" in script_contents: if upgrade_error_hosts is not None and host in upgrade_error_hosts: diff --git a/modules/aws/sonar-upgrader/python_upgrader/upgrade/main.py b/modules/aws/sonar-upgrader/python_upgrader/upgrade/main.py index c9112e50f..19e99f17d 100644 --- a/modules/aws/sonar-upgrader/python_upgrader/upgrade/main.py +++ b/modules/aws/sonar-upgrader/python_upgrader/upgrade/main.py @@ -556,7 +556,7 @@ def extract_preflight_validations_result(script_output): def are_preflight_validations_passed(preflight_validations_result): - return preflight_validations_result.get('different_version') \ + return preflight_validations_result.get('higher_target_version') \ and preflight_validations_result.get('min_version') \ and preflight_validations_result.get('max_version_hop') \ and preflight_validations_result.get('enough_free_disk_space') diff --git a/modules/aws/sonar-upgrader/python_upgrader/upgrade/scripts/__init__.py b/modules/aws/sonar-upgrader/python_upgrader/upgrade/scripts/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/modules/aws/sonar-upgrader/python_upgrader/upgrade/scripts/run_preflight_validations.py b/modules/aws/sonar-upgrader/python_upgrader/upgrade/scripts/run_preflight_validations.py index 2da5579c5..7de3e6869 100644 --- 
a/modules/aws/sonar-upgrader/python_upgrader/upgrade/scripts/run_preflight_validations.py +++ b/modules/aws/sonar-upgrader/python_upgrader/upgrade/scripts/run_preflight_validations.py @@ -5,6 +5,7 @@ from decimal import Decimal from datetime import datetime import shutil +from packaging.version import Version, parse def main(target_version): @@ -30,12 +31,12 @@ def try_validate(): def validate(): source_version, data_dir_path = get_sonar_info() - different_version, min_version_validation_passed, max_version_hop_validation_passed = \ + higher_target_version, min_version_validation_passed, max_version_hop_validation_passed = \ validate_sonar_version(source_version, target_version) enough_free_disk_space = validate_disk_space(data_dir_path) result = { - "different_version": different_version, + "higher_target_version": higher_target_version, "min_version": min_version_validation_passed, "max_version_hop": max_version_hop_validation_passed, "enough_free_disk_space": enough_free_disk_space @@ -82,43 +83,35 @@ def validate_data_dir_path_found(data_dir, jsonar_file_path): def validate_sonar_version(source_version, target_version): - different_version = source_version != target_version - if different_version: - source_major_version = extract_major_version(source_version) - target_major_version = extract_major_version(target_version) - min_version_validation_passed = validate_min_version(source_major_version) - max_version_hop_validation_passed = validate_max_version_hop(source_major_version, target_major_version) - - if not min_version_validation_passed or not max_version_hop_validation_passed: - print(f"Sonar version validation failed for source version: {source_version} " - f"and target_version {target_version}") - else: - print("Source and target versions are the same") - min_version_validation_passed = True - max_version_hop_validation_passed = True + higher_target_version_passed = validate_higher_target_version(source_version, target_version) - return different_version, min_version_validation_passed, max_version_hop_validation_passed + min_version_validation_passed = validate_min_version(source_version) + max_version_hop_validation_passed = validate_max_version_hop(source_version, target_version) + if not higher_target_version_passed or not min_version_validation_passed or not max_version_hop_validation_passed: + print(f"Sonar version validation failed for source version: {source_version} " + f"and target_version {target_version}") -# For example, if version is the string "4.12.0.10.0", returns the number 4.12 -def extract_major_version(version): - second_period_index = version.find(".", version.find(".") + 1) - if second_period_index != -1: - major_version_str = version[:second_period_index] - return Decimal(major_version_str) - else: - raise Exception(f"Invalid version format: {version}, must be x.x.x.x.x") + return higher_target_version_passed, min_version_validation_passed, max_version_hop_validation_passed + + +def validate_higher_target_version(source_version, target_version): + return parse(source_version) < parse(target_version) -def validate_min_version(source_major_version): - return source_major_version >= 4.10 +# For example, if version is the string "4.12.0.10.0", major is 4 and minor is 12 +def validate_min_version(source_version): + return Version(source_version).major > 4 or \ + (Version(source_version).major == 4 and Version(source_version).minor >= 10) -def validate_max_version_hop(source_major_version, target_major_version): - # TODO handle when 5.x will be released - hop = 
round(target_major_version - source_major_version, 2) +def validate_max_version_hop(source_version, target_version): + # TODO handle when 5.x will be released (probably this validation will be removed before then) + source_minor_version = Version(source_version).minor + target_minor_version = Version(target_version).minor + hop = target_minor_version - source_minor_version print(f"Version hop: {hop}") - return hop <= 0.02 + return hop <= 2 def validate_disk_space(data_dir_path): diff --git a/modules/aws/sonar-upgrader/python_upgrader/upgrade_status.tpl b/modules/aws/sonar-upgrader/python_upgrader/upgrade_status.tpl index 6e19545e6..47d69ead8 100644 --- a/modules/aws/sonar-upgrader/python_upgrader/upgrade_status.tpl +++ b/modules/aws/sonar-upgrader/python_upgrader/upgrade_status.tpl @@ -12,7 +12,7 @@ }, "1.2.3.7": { "status": "Preflight validations failed", - "message": "{\"different_version\": false, \"min_version\": true, \"max_version_hop\": true}" + "message": "{\"higher_target_version\": false, \"min_version\": true, \"max_version_hop\": true}" } }, "target-version": "4.12.0.10.0"
From d06aa137029f8f5bf56d742d3059a8d47be834df Mon Sep 17 00:00:00 2001 From: "linda.nasredin" Date: Wed, 15 Nov 2023 11:36:54 +0200 Subject: [PATCH 05/12] Updated version history - preparation for next release --- README.md | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/README.md b/README.md index 4964ec88b..695a87445 100644 --- a/README.md +++ b/README.md @@ -364,6 +364,15 @@ The following table lists the _latest_ eDSF Kit releases, their release date and Sonar deployment on Azure Beta release. + + Coming soon + + 1.7.1 + + + Improvements and bug fixes.
+ + From 153e08d82b08010e0d784738e522d8de48a80ec1 Mon Sep 17 00:00:00 2001 From: "linda.nasredin" Date: Wed, 15 Nov 2023 13:44:49 +0200 Subject: [PATCH 06/12] Removed test function from production code --- .../upgrade/scripts/run_preflight_validations.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/modules/aws/sonar-upgrader/python_upgrader/upgrade/scripts/run_preflight_validations.py b/modules/aws/sonar-upgrader/python_upgrader/upgrade/scripts/run_preflight_validations.py index 7de3e6869..d194a0f5b 100644 --- a/modules/aws/sonar-upgrader/python_upgrader/upgrade/scripts/run_preflight_validations.py +++ b/modules/aws/sonar-upgrader/python_upgrader/upgrade/scripts/run_preflight_validations.py @@ -131,16 +131,6 @@ def check_free_space(directory, required_space_gb): return free_space >= required_space_gb -def test(): - # test patch upgrade from 4.10.0.0 to 4.10.0.1 - # is_valid = validate_max_version_hop(4.10, 4.10) - - # test major version upgrade upgrade from 4.10.0.1.3 to 4.12.0.0.0 - is_valid = validate_max_version_hop(4.10, 4.12) - - print(f"is_valid: {is_valid}") - - if __name__ == "__main__": target_version = sys.argv[1] main(target_version) From 5411fa7bb02116f3f5b1cb45e91379e5d62b2c0b Mon Sep 17 00:00:00 2001 From: "linda.nasredin" Date: Wed, 15 Nov 2023 17:19:00 +0200 Subject: [PATCH 07/12] Refactoring: Relocated preparation functions and extracted function - is_empty_run --- .../python_upgrader/upgrade/main.py | 139 ++++++++++-------- 1 file changed, 74 insertions(+), 65 deletions(-) diff --git a/modules/aws/sonar-upgrader/python_upgrader/upgrade/main.py b/modules/aws/sonar-upgrader/python_upgrader/upgrade/main.py index 19e99f17d..ba075d5c7 100644 --- a/modules/aws/sonar-upgrader/python_upgrader/upgrade/main.py +++ b/modules/aws/sonar-upgrader/python_upgrader/upgrade/main.py @@ -25,6 +25,22 @@ # Helper functions +def print_inputs(agentless_gws, hubs, tarball_location, args): + print("List of Agentless Gateways:") + print_hadr_sets(agentless_gws) + print("List of DSF Hubs:") + print_hadr_sets(hubs) + print(f"target_version: {args.target_version}") + print(f"connection_timeout: {args.connection_timeout}") + print(f"test_connection: {args.test_connection}") + print(f"run_preflight_validations: {args.run_preflight_validations}") + print(f"run_upgrade: {args.run_upgrade}") + print(f"run_postflight_validations: {args.run_postflight_validations}") + print(f"clean_old_deployments: {args.clean_old_deployments}") + print(f"stop_on_failure: {args.stop_on_failure}") + print(f"tarball_location: {tarball_location}") + + def build_script_file_path(script_file_name): file_dir = get_current_directory() return join_paths(file_dir, "scripts", script_file_name) @@ -205,10 +221,7 @@ def main(args): upgrade_status_service = init_upgrade_status(extended_nodes, args.target_version) - if not args.test_connection and not args.run_preflight_validations and not args.run_upgrade and \ - not args.run_postflight_validations and not args.clean_old_deployments: - print("All flags are disabled. 
Nothing to do here.") - print_summary(upgrade_status_service) + if is_empty_run(args, upgrade_status_service): return try: @@ -273,67 +286,13 @@ def init_upgrade_status(extended_nodes, target_version): return upgrade_status_service -def parse_args(): - parser = argparse.ArgumentParser(description="Upgrade script for DSF Hub and Agentless Gateway") - parser.add_argument("--agentless_gws", required=True, help="JSON-encoded Agentless Gateway list") - parser.add_argument("--dsf_hubs", required=True, help="JSON-encoded DSF Hub list") - parser.add_argument("--target_version", required=True, help="Target version to upgrade") - parser.add_argument("--connection_timeout", - help="Client connection timeout in seconds used for the SSH connections between the " - "installer machine and the DSF nodes being upgraded. Its purpose is to ensure a " - "uniform behavior across different platforms. Note that the SSH server in the DSF nodes " - "may have its own timeout configurations which may override this setting.") - parser.add_argument("--test_connection", type=str_to_bool, - help="Whether to test the SSH connection to all DSF nodes being upgraded " - "before starting the upgrade") - parser.add_argument("--run_preflight_validations", type=str_to_bool, - help="Whether to run preflight validations") - parser.add_argument("--run_upgrade", type=str_to_bool, help="Whether to run the upgrade") - parser.add_argument("--run_postflight_validations", type=str_to_bool, - help="Whether to run postflight validations") - # parser.add_argument("--clean_old_deployments", type=str_to_bool, help="Whether to clean old deployments") - parser.add_argument("--stop_on_failure", type=str_to_bool, - help="Whether to stop or continue to upgrade the next DSF nodes in case of failure " - "on a DSF node") - parser.add_argument("--tarball_location", - help="JSON-encoded S3 bucket location of the DSF installation software") - args = parser.parse_args() - return args - - -def fill_args_defaults(args): - if args.connection_timeout is None: - args.connection_timeout = 90 - if args.test_connection is None: - args.test_connection = True - if args.run_preflight_validations is None: - args.run_preflight_validations = True - if args.run_upgrade is None: - args.run_upgrade = True - if args.run_postflight_validations is None: - args.run_postflight_validations = True - if args.stop_on_failure is None: - args.stop_on_failure = True - if args.tarball_location is None: - args.tarball_location = '{"s3_bucket": "1ef8de27-ed95-40ff-8c08-7969fc1b7901", "s3_region": "us-east-1"}' - - args.clean_old_deployments = False - - -def print_inputs(agentless_gws, hubs, tarball_location, args): - print("List of Agentless Gateways:") - print_hadr_sets(agentless_gws) - print("List of DSF Hubs:") - print_hadr_sets(hubs) - print(f"target_version: {args.target_version}") - print(f"connection_timeout: {args.connection_timeout}") - print(f"test_connection: {args.test_connection}") - print(f"run_preflight_validations: {args.run_preflight_validations}") - print(f"run_upgrade: {args.run_upgrade}") - print(f"run_postflight_validations: {args.run_postflight_validations}") - print(f"clean_old_deployments: {args.clean_old_deployments}") - print(f"stop_on_failure: {args.stop_on_failure}") - print(f"tarball_location: {tarball_location}") +def is_empty_run(args, upgrade_status_service): + if not args.test_connection and not args.run_preflight_validations and not args.run_upgrade and \ + not args.run_postflight_validations and not args.clean_old_deployments: + print("All flags are 
disabled. Nothing to do here.") + print_summary(upgrade_status_service) + return True + return False def test_connection_to_extended_nodes(extended_nodes, stop_on_failure, upgrade_status_service): @@ -926,6 +885,56 @@ def verify_successful_run_by_configuration_options(args, upgrade_status_service) return is_successful_run +# Preparation functions + + +def parse_args(): + parser = argparse.ArgumentParser(description="Upgrade script for DSF Hub and Agentless Gateway") + parser.add_argument("--agentless_gws", required=True, help="JSON-encoded Agentless Gateway list") + parser.add_argument("--dsf_hubs", required=True, help="JSON-encoded DSF Hub list") + parser.add_argument("--target_version", required=True, help="Target version to upgrade") + parser.add_argument("--connection_timeout", + help="Client connection timeout in seconds used for the SSH connections between the " + "installer machine and the DSF nodes being upgraded. Its purpose is to ensure a " + "uniform behavior across different platforms. Note that the SSH server in the DSF nodes " + "may have its own timeout configurations which may override this setting.") + parser.add_argument("--test_connection", type=str_to_bool, + help="Whether to test the SSH connection to all DSF nodes being upgraded " + "before starting the upgrade") + parser.add_argument("--run_preflight_validations", type=str_to_bool, + help="Whether to run preflight validations") + parser.add_argument("--run_upgrade", type=str_to_bool, help="Whether to run the upgrade") + parser.add_argument("--run_postflight_validations", type=str_to_bool, + help="Whether to run postflight validations") + # parser.add_argument("--clean_old_deployments", type=str_to_bool, help="Whether to clean old deployments") + parser.add_argument("--stop_on_failure", type=str_to_bool, + help="Whether to stop or continue to upgrade the next DSF nodes in case of failure " + "on a DSF node") + parser.add_argument("--tarball_location", + help="JSON-encoded S3 bucket location of the DSF installation software") + args = parser.parse_args() + return args + + +def fill_args_defaults(args): + if args.connection_timeout is None: + args.connection_timeout = 90 + if args.test_connection is None: + args.test_connection = True + if args.run_preflight_validations is None: + args.run_preflight_validations = True + if args.run_upgrade is None: + args.run_upgrade = True + if args.run_postflight_validations is None: + args.run_postflight_validations = True + if args.stop_on_failure is None: + args.stop_on_failure = True + if args.tarball_location is None: + args.tarball_location = '{"s3_bucket": "1ef8de27-ed95-40ff-8c08-7969fc1b7901", "s3_region": "us-east-1"}' + + args.clean_old_deployments = False + + def set_global_variables(connection_timeout): global _connection_timeout _connection_timeout = int(connection_timeout) From f7dc2c31f74759f586d0e91bc7822b251b40f79c Mon Sep 17 00:00:00 2001 From: "linda.nasredin" Date: Wed, 15 Nov 2023 17:21:58 +0200 Subject: [PATCH 08/12] Added missing "SSH" to keys descriptions --- .../dsf_single_account_deployment/variables.tf | 8 ++++---- .../sonar_multi_account_deployment/variables.tf | 8 ++++---- .../sonar_single_account_deployment/variables.tf | 8 ++++---- modules/aws/agentless-gw/variables.tf | 4 ++-- modules/aws/hub/variables.tf | 4 ++-- modules/aws/sonar-base-instance/secret.tf | 2 +- modules/aws/sonar-base-instance/variables.tf | 8 ++++---- modules/azurerm/agentless-gw/variables.tf | 4 ++-- modules/azurerm/hub/variables.tf | 4 ++-- 
modules/azurerm/sonar-base-instance/variables.tf | 10 +++++----- 10 files changed, 30 insertions(+), 30 deletions(-) diff --git a/examples/aws/installation/dsf_single_account_deployment/variables.tf b/examples/aws/installation/dsf_single_account_deployment/variables.tf index 71538fd92..08c2fe690 100644 --- a/examples/aws/installation/dsf_single_account_deployment/variables.tf +++ b/examples/aws/installation/dsf_single_account_deployment/variables.tf @@ -427,25 +427,25 @@ variable "sonar_terraform_script_path_folder" { variable "sonarw_hub_private_key_secret_name" { type = string default = null - description = "Secret name in AWS secrets manager which holds the DSF Hub sonarw user private key - used for remote Agentless Gateway federation, HADR, etc." + description = "Secret name in AWS secrets manager which holds the DSF Hub sonarw user SSH private key - used for remote Agentless Gateway federation, HADR, etc." } variable "sonarw_hub_public_key_file_path" { type = string default = null - description = "The DSF Hub sonarw user public key file path - used for remote Agentless Gateway federation, HADR, etc." + description = "The DSF Hub sonarw user SSH public key file path - used for remote Agentless Gateway federation, HADR, etc." } variable "sonarw_gw_private_key_secret_name" { type = string default = null - description = "Secret name in AWS secrets manager which holds the Agentless Gateway sonarw user private key - used for remote Agentless Gateway federation, HADR, etc." + description = "Secret name in AWS secrets manager which holds the Agentless Gateway sonarw user SSH private key - used for remote Agentless Gateway federation, HADR, etc." } variable "sonarw_gw_public_key_file_path" { type = string default = null - description = "The Agentless Gateway sonarw user public key file path - used for remote Agentless Gateway federation, HADR, etc." + description = "The Agentless Gateway sonarw user SSH public key file path - used for remote Agentless Gateway federation, HADR, etc." } variable "sonar_machine_base_directory" { diff --git a/examples/aws/installation/sonar_multi_account_deployment/variables.tf b/examples/aws/installation/sonar_multi_account_deployment/variables.tf index b150af7ab..7d5163256 100644 --- a/examples/aws/installation/sonar_multi_account_deployment/variables.tf +++ b/examples/aws/installation/sonar_multi_account_deployment/variables.tf @@ -320,25 +320,25 @@ variable "terraform_script_path_folder" { variable "sonarw_hub_private_key_secret_name" { type = string default = null - description = "Secret name in AWS secrets manager which holds the DSF Hub sonarw user private key - used for remote Agentless Gateway federation, HADR, etc." + description = "Secret name in AWS secrets manager which holds the DSF Hub sonarw user SSH private key - used for remote Agentless Gateway federation, HADR, etc." } variable "sonarw_hub_public_key_file_path" { type = string default = null - description = "The DSF Hub sonarw user public key file path - used for remote Agentless Gateway federation, HADR, etc." + description = "The DSF Hub sonarw user SSH public key file path - used for remote Agentless Gateway federation, HADR, etc." } variable "sonarw_gw_private_key_secret_name" { type = string default = null - description = "Secret name in AWS secrets manager which holds the Agentless Gateway sonarw user private key - used for remote Agentless Gateway federation, HADR, etc." 
+ description = "Secret name in AWS secrets manager which holds the Agentless Gateway sonarw user SSH private key - used for remote Agentless Gateway federation, HADR, etc." } variable "sonarw_gw_public_key_file_path" { type = string default = null - description = "The Agentless Gateway sonarw user public key file path - used for remote Agentless Gateway federation, HADR, etc." + description = "The Agentless Gateway sonarw user SSH public key file path - used for remote Agentless Gateway federation, HADR, etc." } variable "sonar_machine_base_directory" { diff --git a/examples/aws/installation/sonar_single_account_deployment/variables.tf b/examples/aws/installation/sonar_single_account_deployment/variables.tf index 3f2a45a36..de89f7792 100644 --- a/examples/aws/installation/sonar_single_account_deployment/variables.tf +++ b/examples/aws/installation/sonar_single_account_deployment/variables.tf @@ -233,25 +233,25 @@ variable "terraform_script_path_folder" { variable "sonarw_hub_private_key_secret_name" { type = string default = null - description = "Secret name in AWS secrets manager which holds the DSF Hub sonarw user private key - used for remote Agentless Gateway federation, HADR, etc." + description = "Secret name in AWS secrets manager which holds the DSF Hub sonarw user SSH private key - used for remote Agentless Gateway federation, HADR, etc." } variable "sonarw_hub_public_key_file_path" { type = string default = null - description = "The DSF Hub sonarw user public key file path - used for remote Agentless Gateway federation, HADR, etc." + description = "The DSF Hub sonarw user SSH public key file path - used for remote Agentless Gateway federation, HADR, etc." } variable "sonarw_gw_private_key_secret_name" { type = string default = null - description = "Secret name in AWS secrets manager which holds the Agentless Gateway sonarw user private key - used for remote Agentless Gateway federation, HADR, etc." + description = "Secret name in AWS secrets manager which holds the Agentless Gateway sonarw user SSH private key - used for remote Agentless Gateway federation, HADR, etc." } variable "sonarw_gw_public_key_file_path" { type = string default = null - description = "The Agentless Gateway sonarw user public key file path - used for remote Agentless Gateway federation, HADR, etc." + description = "The Agentless Gateway sonarw user SSH public key file path - used for remote Agentless Gateway federation, HADR, etc." } variable "use_hub_as_proxy" { diff --git a/modules/aws/agentless-gw/variables.tf b/modules/aws/agentless-gw/variables.tf index 1c8d278b4..9b5359fb6 100644 --- a/modules/aws/agentless-gw/variables.tf +++ b/modules/aws/agentless-gw/variables.tf @@ -239,13 +239,13 @@ variable "terraform_script_path_folder" { variable "sonarw_private_key_secret_name" { type = string default = null - description = "Secret name in AWS secrets manager which holds the Agentless Gateway sonarw user private key - used for remote Agentless Gateway federation, HADR, etc." + description = "Secret name in AWS secrets manager which holds the Agentless Gateway sonarw user SSH private key - used for remote Agentless Gateway federation, HADR, etc." } variable "sonarw_public_key_content" { type = string default = null - description = "The Agentless Gateway sonarw user public key - used for remote Agentless Gateway federation, HADR, etc." + description = "The Agentless Gateway sonarw user SSH public key - used for remote Agentless Gateway federation, HADR, etc." 
} variable "volume_attachment_device_name" { diff --git a/modules/aws/hub/variables.tf b/modules/aws/hub/variables.tf index 10f5196d7..2b4050019 100644 --- a/modules/aws/hub/variables.tf +++ b/modules/aws/hub/variables.tf @@ -253,13 +253,13 @@ variable "terraform_script_path_folder" { variable "sonarw_private_key_secret_name" { type = string default = null - description = "Secret name in AWS secrets manager which holds the DSF Hub sonarw user private key - used for remote Agentless Gateway federation, HADR, etc." + description = "Secret name in AWS secrets manager which holds the DSF Hub sonarw user SSH private key - used for remote Agentless Gateway federation, HADR, etc." } variable "sonarw_public_key_content" { type = string default = null - description = "The DSF Hub sonarw user public key - used for remote Agentless Gateway federation, HADR, etc." + description = "The DSF Hub sonarw user SSH public key - used for remote Agentless Gateway federation, HADR, etc." } variable "generate_access_tokens" { diff --git a/modules/aws/sonar-base-instance/secret.tf b/modules/aws/sonar-base-instance/secret.tf index bd21e783f..29ba00d69 100644 --- a/modules/aws/sonar-base-instance/secret.tf +++ b/modules/aws/sonar-base-instance/secret.tf @@ -51,7 +51,7 @@ locals { resource "aws_secretsmanager_secret" "sonarw_private_key_secret" { count = local.should_create_sonarw_private_key_in_secrets_manager == true ? 1 : 0 name_prefix = "${var.name}-sonarw-private-key" - description = "Imperva DSF node sonarw user private key - used for remote Agentless Gateway federation, HADR, etc." + description = "Imperva DSF node sonarw user SSH private key - used for remote Agentless Gateway federation, HADR, etc." tags = var.tags } diff --git a/modules/aws/sonar-base-instance/variables.tf b/modules/aws/sonar-base-instance/variables.tf index 89437452b..c64f36f4f 100644 --- a/modules/aws/sonar-base-instance/variables.tf +++ b/modules/aws/sonar-base-instance/variables.tf @@ -167,7 +167,7 @@ variable "main_node_sonarw_public_key" { variable "main_node_sonarw_private_key" { type = string - description = "Private key of the sonarw user taken from the main node output. This variable must only be defined for the DR node." + description = "SSH private key of the sonarw user taken from the main node output. This variable must only be defined for the DR node." default = null } @@ -183,7 +183,7 @@ variable "proxy_info" { variable "hub_sonarw_public_key" { type = string - description = "Public key of the sonarw user taken from the main Hub output. This variable must only be defined for the Gateway. Used, for example, in federation." + description = "SSH public key of the sonarw user taken from the main Hub output. This variable must only be defined for the Gateway. Used, for example, in federation." default = null } @@ -204,13 +204,13 @@ variable "terraform_script_path_folder" { variable "sonarw_private_key_secret_name" { type = string default = null - description = "Secret name in AWS secrets manager which holds the DSF node sonarw user private key - used for remote Agentless Gateway federation, HADR, etc." + description = "Secret name in AWS secrets manager which holds the DSF node sonarw user SSH private key - used for remote Agentless Gateway federation, HADR, etc." } variable "sonarw_public_key_content" { type = string default = null - description = "The DSF node sonarw user public key - used for remote Agentless Gateway federation, HADR, etc." 
+ description = "The DSF node sonarw user SSH public key - used for remote Agentless Gateway federation, HADR, etc." } variable "generate_access_tokens" { diff --git a/modules/azurerm/agentless-gw/variables.tf b/modules/azurerm/agentless-gw/variables.tf index f3a7d1ce0..6ccd092e1 100644 --- a/modules/azurerm/agentless-gw/variables.tf +++ b/modules/azurerm/agentless-gw/variables.tf @@ -215,13 +215,13 @@ variable "terraform_script_path_folder" { variable "sonarw_private_key_secret_name" { type = string default = null - description = "Secret name in AWS secrets manager which holds the Agentless Gateway sonarw user private key - used for remote Agentless Gateway federation, HADR, etc." + description = "Secret name in AWS secrets manager which holds the Agentless Gateway sonarw user SSH private key - used for remote Agentless Gateway federation, HADR, etc." } variable "sonarw_public_key_content" { type = string default = null - description = "The Agentless Gateway sonarw user public key - used for remote Agentless Gateway federation, HADR, etc." + description = "The Agentless Gateway sonarw user SSH public key - used for remote Agentless Gateway federation, HADR, etc." } variable "send_usage_statistics" { diff --git a/modules/azurerm/hub/variables.tf b/modules/azurerm/hub/variables.tf index 5ce5f3d53..04c455d41 100644 --- a/modules/azurerm/hub/variables.tf +++ b/modules/azurerm/hub/variables.tf @@ -235,13 +235,13 @@ variable "terraform_script_path_folder" { variable "sonarw_private_key_secret_name" { type = string default = null - description = "Secret name in AWS secrets manager which holds the DSF Hub sonarw user private key - used for remote Agentless Gateway federation, HADR, etc." + description = "Secret name in AWS secrets manager which holds the DSF Hub sonarw user SSH private key - used for remote Agentless Gateway federation, HADR, etc." } variable "sonarw_public_key_content" { type = string default = null - description = "The DSF Hub sonarw user public key - used for remote Agentless Gateway federation, HADR, etc." + description = "The DSF Hub sonarw user SSH public key - used for remote Agentless Gateway federation, HADR, etc." } variable "generate_access_tokens" { diff --git a/modules/azurerm/sonar-base-instance/variables.tf b/modules/azurerm/sonar-base-instance/variables.tf index 5b8dd1407..05fea95b5 100644 --- a/modules/azurerm/sonar-base-instance/variables.tf +++ b/modules/azurerm/sonar-base-instance/variables.tf @@ -144,13 +144,13 @@ variable "hadr_dr_node" { variable "main_node_sonarw_public_key" { type = string - description = "Public key of the sonarw user taken from the main node output. This variable must only be defined for the DR node." + description = "SSH public key of the sonarw user taken from the main node output. This variable must only be defined for the DR node." default = null } variable "main_node_sonarw_private_key" { type = string - description = "Private key of the sonarw user taken from the main node output. This variable must only be defined for the DR node." + description = "SSH private key of the sonarw user taken from the main node output. This variable must only be defined for the DR node." default = null } @@ -166,7 +166,7 @@ variable "proxy_info" { variable "hub_sonarw_public_key" { type = string - description = "Public key of the sonarw user taken from the main Hub output. This variable must only be defined for the Gateway. Used, for example, in federation." + description = "SSH public key of the sonarw user taken from the main Hub output. 
This variable must only be defined for the Gateway. Used, for example, in federation." default = null } @@ -187,13 +187,13 @@ variable "terraform_script_path_folder" { variable "sonarw_private_key_secret_name" { type = string default = null - description = "Secret name in AWS secrets manager which holds the DSF node sonarw user private key - used for remote Agentless Gateway federation, HADR, etc." + description = "Secret name in AWS secrets manager which holds the DSF node sonarw user SSH private key - used for remote Agentless Gateway federation, HADR, etc." } variable "sonarw_public_key_content" { type = string default = null - description = "The DSF node sonarw user public key - used for remote Agentless Gateway federation, HADR, etc." + description = "The DSF node sonarw user SSH public key - used for remote Agentless Gateway federation, HADR, etc." } variable "generate_access_tokens" { From 8a741363b253392fc258af26039c05f84a8d3dd1 Mon Sep 17 00:00:00 2001 From: "linda.nasredin" Date: Thu, 16 Nov 2023 13:10:52 +0200 Subject: [PATCH 09/12] Refactoring: Extracted function - run_upgrade_stages --- .../python_upgrader/upgrade/main.py | 80 ++++++++++--------- 1 file changed, 43 insertions(+), 37 deletions(-) diff --git a/modules/aws/sonar-upgrader/python_upgrader/upgrade/main.py b/modules/aws/sonar-upgrader/python_upgrader/upgrade/main.py index ba075d5c7..869b92e85 100644 --- a/modules/aws/sonar-upgrader/python_upgrader/upgrade/main.py +++ b/modules/aws/sonar-upgrader/python_upgrader/upgrade/main.py @@ -225,43 +225,8 @@ def main(args): return try: - if args.test_connection: - succeeded = test_connection_to_extended_nodes(extended_nodes, args.stop_on_failure, upgrade_status_service) - if succeeded: - print(f"### Test connection to all DSF nodes succeeded") - - python_location_dict = {} - if should_run_python(args): - python_location_dict = collect_python_locations(extended_nodes, args.stop_on_failure, - upgrade_status_service) - - # Preflight validation - if args.run_preflight_validations: - preflight_validations_passed = run_all_preflight_validations(agentless_gw_extended_nodes, - dsf_hub_extended_nodes, args.target_version, - python_location_dict, args.stop_on_failure, - upgrade_status_service) - if preflight_validations_passed: - print(f"### Preflight validations passed for all DSF nodes") - - # Upgrade, postflight validations, clean old deployments - if args.run_upgrade or args.run_postflight_validations or args.clean_old_deployments: - success = maybe_upgrade_and_postflight(agentless_gws, hubs, args.target_version, args.run_upgrade, - args.run_postflight_validations, args.clean_old_deployments, - python_location_dict, args.stop_on_failure, tarball_location, - upgrade_status_service) - print_upgrade_result = args.run_upgrade - print_postflight_result = not args.run_upgrade and args.run_postflight_validations - if print_upgrade_result: - if success: - print(f"### Upgrade succeeded") - else: - print(f"### Upgrade failed") - if print_postflight_result: - if success: - print(f"### Upgrade postflight validations passed") - else: - print(f"### Upgrade postflight validations didn't pass") + run_upgrade_stages(args, extended_nodes, agentless_gw_extended_nodes, dsf_hub_extended_nodes, + agentless_gws, hubs, tarball_location, upgrade_status_service) except UpgradeException as e: print(f"### Error message: {e}") print(f"### An error occurred, aborting upgrade...") @@ -295,6 +260,47 @@ def is_empty_run(args, upgrade_status_service): return False +def run_upgrade_stages(args, extended_nodes, 
agentless_gw_extended_nodes, dsf_hub_extended_nodes, agentless_gws, hubs,
+                       tarball_location, upgrade_status_service):
+    if args.test_connection:
+        succeeded = test_connection_to_extended_nodes(extended_nodes, args.stop_on_failure, upgrade_status_service)
+        if succeeded:
+            print(f"### Test connection to all DSF nodes succeeded")
+
+    python_location_dict = {}
+    if should_run_python(args):
+        python_location_dict = collect_python_locations(extended_nodes, args.stop_on_failure,
+                                                        upgrade_status_service)
+
+    # Preflight validation
+    if args.run_preflight_validations:
+        preflight_validations_passed = run_all_preflight_validations(agentless_gw_extended_nodes,
+                                                                     dsf_hub_extended_nodes, args.target_version,
+                                                                     python_location_dict, args.stop_on_failure,
+                                                                     upgrade_status_service)
+        if preflight_validations_passed:
+            print(f"### Preflight validations passed for all DSF nodes")
+
+    # Upgrade, postflight validations, clean old deployments
+    if args.run_upgrade or args.run_postflight_validations or args.clean_old_deployments:
+        success = maybe_upgrade_and_postflight(agentless_gws, hubs, args.target_version, args.run_upgrade,
+                                               args.run_postflight_validations, args.clean_old_deployments,
+                                               python_location_dict, args.stop_on_failure, tarball_location,
+                                               upgrade_status_service)
+        print_upgrade_result = args.run_upgrade
+        print_postflight_result = not args.run_upgrade and args.run_postflight_validations
+        if print_upgrade_result:
+            if success:
+                print(f"### Upgrade succeeded")
+            else:
+                print(f"### Upgrade failed")
+        if print_postflight_result:
+            if success:
+                print(f"### Upgrade postflight validations passed")
+            else:
+                print(f"### Upgrade postflight validations didn't pass")
+
+
 def test_connection_to_extended_nodes(extended_nodes, stop_on_failure, upgrade_status_service):
     '''
     :param extended_nodes:

From 7e2ca98384fe960e6dcf340bf74e7d5a155d4506 Mon Sep 17 00:00:00 2001
From: "linda.nasredin" 
Date: Thu, 16 Nov 2023 13:25:31 +0200
Subject: [PATCH 10/12] Refactoring: Extracted function - run_upgrade_stages,
 plus a function for each stage or step (a step is smaller than a stage)

---
 .../python_upgrader/upgrade/main.py | 24 +++++++++++++++++--
 1 file changed, 22 insertions(+), 2 deletions(-)

diff --git a/modules/aws/sonar-upgrader/python_upgrader/upgrade/main.py b/modules/aws/sonar-upgrader/python_upgrader/upgrade/main.py
index 869b92e85..e7ab448d0 100644
--- a/modules/aws/sonar-upgrader/python_upgrader/upgrade/main.py
+++ b/modules/aws/sonar-upgrader/python_upgrader/upgrade/main.py
@@ -262,17 +262,35 @@ def is_empty_run(args, upgrade_status_service):
 def run_upgrade_stages(args, extended_nodes, agentless_gw_extended_nodes, dsf_hub_extended_nodes, agentless_gws, hubs,
                        tarball_location, upgrade_status_service):
+
+    run_test_connection_stage(args, extended_nodes, upgrade_status_service)
+
+    python_location_dict = run_collect_python_location_step(args, extended_nodes, upgrade_status_service)
+
+    run_preflight_validations_stage(args, agentless_gw_extended_nodes, dsf_hub_extended_nodes, python_location_dict,
+                                    upgrade_status_service)
+
+    run_upgrade_and_post_upgrade_stages(args, agentless_gws, hubs, tarball_location, python_location_dict,
+                                        upgrade_status_service)
+
+
+def run_test_connection_stage(args, extended_nodes, upgrade_status_service):
     if args.test_connection:
         succeeded = test_connection_to_extended_nodes(extended_nodes, args.stop_on_failure, upgrade_status_service)
         if succeeded:
             print(f"### Test connection to all DSF nodes succeeded")
+
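The stage-per-function layout that this commit and the previous one converge on can be shown in miniature. The sketch below is illustrative only: argparse stands in for the real CLI wiring, the stage bodies are placeholder prints, and the flag set is trimmed to two.

import argparse

# Skeleton of the staged flow, assuming each stage guards on its own CLI flag.
def run_upgrade_stages(args):
    run_test_connection_stage(args)
    run_preflight_validations_stage(args)

def run_test_connection_stage(args):
    if args.test_connection:  # a stage is a no-op unless its flag was passed
        print("### Test connection stage")

def run_preflight_validations_stage(args):
    if args.run_preflight_validations:
        print("### Preflight validations stage")

parser = argparse.ArgumentParser()
parser.add_argument("--test-connection", action="store_true")
parser.add_argument("--run-preflight-validations", action="store_true")
run_upgrade_stages(parser.parse_args(["--test-connection"]))  # prints the test connection line only

+def run_collect_python_location_step(args, extended_nodes, upgrade_status_service):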
     python_location_dict = {}
     if should_run_python(args):
         python_location_dict = collect_python_locations(extended_nodes, args.stop_on_failure,
                                                         upgrade_status_service)
+    return python_location_dict
 
-    # Preflight validation
+
+def run_preflight_validations_stage(args, agentless_gw_extended_nodes, dsf_hub_extended_nodes, python_location_dict,
+                                    upgrade_status_service):
     if args.run_preflight_validations:
         preflight_validations_passed = run_all_preflight_validations(agentless_gw_extended_nodes,
                                                                      dsf_hub_extended_nodes, args.target_version,
                                                                      python_location_dict, args.stop_on_failure,
                                                                      upgrade_status_service)
         if preflight_validations_passed:
             print(f"### Preflight validations passed for all DSF nodes")
 
-    # Upgrade, postflight validations, clean old deployments
+
+def run_upgrade_and_post_upgrade_stages(args, agentless_gws, hubs, tarball_location, python_location_dict,
+                                        upgrade_status_service):
     if args.run_upgrade or args.run_postflight_validations or args.clean_old_deployments:
         success = maybe_upgrade_and_postflight(agentless_gws, hubs, args.target_version, args.run_upgrade,
                                                args.run_postflight_validations, args.clean_old_deployments,
                                                python_location_dict, args.stop_on_failure, tarball_location,
                                                upgrade_status_service)
         print_upgrade_result = args.run_upgrade
         print_postflight_result = not args.run_upgrade and args.run_postflight_validations
         if print_upgrade_result:
             if success:
                 print(f"### Upgrade succeeded")
             else:
                 print(f"### Upgrade failed")
         if print_postflight_result:
             if success:
                 print(f"### Upgrade postflight validations passed")
             else:
                 print(f"### Upgrade postflight validations didn't pass")

From b0e61517f21a7d96ef7dccef66e72f46d1d8f5d9 Mon Sep 17 00:00:00 2001
From: "linda.nasredin" 
Date: Thu, 16 Nov 2023 14:06:44 +0200
Subject: [PATCH 11/12] Refactoring: Converted the extended nodes list to a
 dictionary where the node ID is the key

---
 .../python_upgrader/upgrade/main.py | 84 +++++++++++--------
 1 file changed, 47 insertions(+), 37 deletions(-)

diff --git a/modules/aws/sonar-upgrader/python_upgrader/upgrade/main.py b/modules/aws/sonar-upgrader/python_upgrader/upgrade/main.py
index e7ab448d0..1f31a5882 100644
--- a/modules/aws/sonar-upgrader/python_upgrader/upgrade/main.py
+++ b/modules/aws/sonar-upgrader/python_upgrader/upgrade/main.py
@@ -115,18 +115,18 @@ def generate_dsf_node_name(dsf_node_type, hadr_node_type_name, dsf_node_id):
     return f"{dsf_node_type}, {hadr_node_type_name}, {dsf_node_id}"
 
-def get_flat_extended_node_list(hadr_sets, dsf_node_type):
-    extended_nodes = []
+def get_extended_node_dict(hadr_sets, dsf_node_type):
+    extended_nodes = {}
     for hadr_set in hadr_sets:
         main_node = get_extended_node(hadr_set, 'main', 'Main', dsf_node_type)
         if main_node is not None:
-            extended_nodes.append(main_node)
+            extended_nodes[main_node.get('dsf_node_id')] = main_node
         dr_node = get_extended_node(hadr_set, 'dr', 'DR', dsf_node_type)
         if dr_node is not None:
-            extended_nodes.append(dr_node)
+            extended_nodes[dr_node.get('dsf_node_id')] = dr_node
         minor_node = get_extended_node(hadr_set, 'minor', 'Minor', dsf_node_type)
         if minor_node is not None:
-            extended_nodes.append(minor_node)
+            extended_nodes[minor_node.get('dsf_node_id')] = minor_node
     return extended_nodes
 
@@ -215,17 +215,17 @@ def main(args):
 
     print("********** Start ************")
 
-    agentless_gw_extended_nodes = get_flat_extended_node_list(agentless_gws, "Agentless Gateway")
-    dsf_hub_extended_nodes = get_flat_extended_node_list(hubs, "DSF Hub")
-    extended_nodes = agentless_gw_extended_nodes + dsf_hub_extended_nodes
+    agentless_gw_extended_node_dict = get_extended_node_dict(agentless_gws, "Agentless Gateway")
+    dsf_hub_extended_node_dict = get_extended_node_dict(hubs, "DSF Hub")
+    extended_node_dict = {**agentless_gw_extended_node_dict, **dsf_hub_extended_node_dict}
 
-    upgrade_status_service = init_upgrade_status(extended_nodes, args.target_version)
+    upgrade_status_service = init_upgrade_status(extended_node_dict, args.target_version)
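In isolation, the list-to-dictionary conversion this commit performs reduces to the sketch below; the node records are hypothetical stand-ins, and build_node_dict condenses get_extended_node_dict, which in the real code also resolves the main, DR and minor roles of each HADR set.

# Minimal sketch of the node-dictionary pattern, assuming simplified node records.
def build_node_dict(nodes):
    # Key each extended node by its ID so later stages can look it up directly
    return {node["dsf_node_id"]: node for node in nodes if node is not None}

gw_dict = build_node_dict([{"dsf_node_id": "gw-1", "dsf_node_name": "Agentless Gateway, Main, gw-1"}])
hub_dict = build_node_dict([{"dsf_node_id": "hub-1", "dsf_node_name": "DSF Hub, Main, hub-1"}])

# Merging preserves insertion order; a duplicate ID in hub_dict would overwrite gw_dict
extended_node_dict = {**gw_dict, **hub_dict}
assert list(extended_node_dict) == ["gw-1", "hub-1"]  # list(dict) yields the keys, i.e. the node IDs

Keying by node ID also lets init_upgrade_status derive the node ID list with a plain list(extended_node_dict), as the next hunk shows, and lets later stages address a node directly instead of scanning a list.

     if is_empty_run(args, 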
upgrade_status_service): return try: - run_upgrade_stages(args, extended_nodes, agentless_gw_extended_nodes, dsf_hub_extended_nodes, + run_upgrade_stages(args, extended_node_dict, agentless_gw_extended_node_dict, dsf_hub_extended_node_dict, agentless_gws, hubs, tarball_location, upgrade_status_service) except UpgradeException as e: print(f"### Error message: {e}") @@ -244,9 +244,9 @@ def main(args): raise UpgradeException("One of the upgrade stages failed") -def init_upgrade_status(extended_nodes, target_version): +def init_upgrade_status(extended_node_dict, target_version): upgrade_status_service = UpgradeStatusService() - dsf_nodes_ids = [node.get('dsf_node_id') for node in extended_nodes] + dsf_nodes_ids = list(extended_node_dict) upgrade_status_service.init_upgrade_status(dsf_nodes_ids, target_version) return upgrade_status_service @@ -260,40 +260,50 @@ def is_empty_run(args, upgrade_status_service): return False -def run_upgrade_stages(args, extended_nodes, agentless_gw_extended_nodes, dsf_hub_extended_nodes, agentless_gws, hubs, - tarball_location, upgrade_status_service): +def run_upgrade_stages(args, extended_node_dict, agentless_gw_extended_node_dict, dsf_hub_extended_node_dict, + agentless_gws, hubs, tarball_location, upgrade_status_service): + """ + Runs the various upgrade stages: + - Test connection for all nodes + - Preflight validations for all nodes + - Upgrade and post upgrade operations per node: + - Upgrade + - Postflight validations + - Clean old deployments + Also runs upgrade steps which are smaller than stages: + - Collect python location for all nodes + """ + run_test_connection_stage(args, extended_node_dict, upgrade_status_service) - run_test_connection_stage(args, extended_nodes, upgrade_status_service) + python_location_dict = run_collect_python_location_step(args, extended_node_dict, upgrade_status_service) - python_location_dict = run_collect_python_location_step(args, extended_nodes, upgrade_status_service) - - run_preflight_validations_stage(args, agentless_gw_extended_nodes, dsf_hub_extended_nodes, python_location_dict, - upgrade_status_service) + run_preflight_validations_stage(args, agentless_gw_extended_node_dict, dsf_hub_extended_node_dict, + python_location_dict, upgrade_status_service) run_upgrade_and_post_upgrade_stages(args, agentless_gws, hubs, tarball_location, python_location_dict, upgrade_status_service) -def run_test_connection_stage(args, extended_nodes, upgrade_status_service): +def run_test_connection_stage(args, extended_node_dict, upgrade_status_service): if args.test_connection: - succeeded = test_connection_to_extended_nodes(extended_nodes, args.stop_on_failure, upgrade_status_service) + succeeded = test_connection_to_extended_nodes(extended_node_dict, args.stop_on_failure, upgrade_status_service) if succeeded: print(f"### Test connection to all DSF nodes succeeded") -def run_collect_python_location_step(args, extended_nodes, upgrade_status_service): +def run_collect_python_location_step(args, extended_node_dict, upgrade_status_service): python_location_dict = {} if should_run_python(args): - python_location_dict = collect_python_locations(extended_nodes, args.stop_on_failure, + python_location_dict = collect_python_locations(extended_node_dict, args.stop_on_failure, upgrade_status_service) return python_location_dict -def run_preflight_validations_stage(args, agentless_gw_extended_nodes, dsf_hub_extended_nodes, python_location_dict, - upgrade_status_service): +def run_preflight_validations_stage(args, agentless_gw_extended_node_dict, 
dsf_hub_extended_node_dict, + python_location_dict, upgrade_status_service): if args.run_preflight_validations: - preflight_validations_passed = run_all_preflight_validations(agentless_gw_extended_nodes, - dsf_hub_extended_nodes, args.target_version, + preflight_validations_passed = run_all_preflight_validations(agentless_gw_extended_node_dict, + dsf_hub_extended_node_dict, args.target_version, python_location_dict, args.stop_on_failure, upgrade_status_service) if preflight_validations_passed: @@ -321,15 +331,15 @@ def run_upgrade_and_post_upgrade_stages(args, agentless_gws, hubs, tarball_locat print(f"### Upgrade postflight validations didn't pass") -def test_connection_to_extended_nodes(extended_nodes, stop_on_failure, upgrade_status_service): +def test_connection_to_extended_nodes(extended_node_dict, stop_on_failure, upgrade_status_service): ''' - :param extended_nodes: + :param extended_node_dict: :return: True if test connection to all extended DSF nodes was successful, false if it failed for at least one node ''' print("----- Test connection") all_success_or_skip = True - for extended_node in extended_nodes: + for extended_node in extended_node_dict.values(): success_or_skip = maybe_test_connection_to_extended_node(extended_node, stop_on_failure, upgrade_status_service) all_success_or_skip = all_success_or_skip and success_or_skip return all_success_or_skip @@ -370,10 +380,10 @@ def should_run_python(args): return args.run_preflight_validations or args.run_postflight_validations -def collect_python_locations(extended_nodes, stop_on_failure, upgrade_status_service): +def collect_python_locations(extended_node_dict, stop_on_failure, upgrade_status_service): print("----- Collect Python location") python_location_dict = {} - for extended_node in extended_nodes: + for extended_node in extended_node_dict.values(): python_location = maybe_collect_python_location(extended_node, stop_on_failure, upgrade_status_service) if python_location is not None: python_location_dict[extended_node.get('dsf_node_id')] = python_location @@ -404,17 +414,17 @@ def collect_python_location(extended_node, stop_on_failure, upgrade_status_servi return None -def run_all_preflight_validations(agentless_gw_extended_nodes, dsf_hub_extended_nodes, target_version, +def run_all_preflight_validations(agentless_gw_extended_node_dict, dsf_hub_extended_node_dict, target_version, python_location_dict, stop_on_failure, upgrade_status_service): print("----- Preflight validations") - gws_preflight_validations_passed = run_preflight_validations_for_extended_nodes(agentless_gw_extended_nodes, + gws_preflight_validations_passed = run_preflight_validations_for_extended_nodes(agentless_gw_extended_node_dict, target_version, PREFLIGHT_VALIDATIONS_SCRIPT_NAME, python_location_dict, stop_on_failure, upgrade_status_service) - hub_preflight_validations_passed = run_preflight_validations_for_extended_nodes(dsf_hub_extended_nodes, + hub_preflight_validations_passed = run_preflight_validations_for_extended_nodes(dsf_hub_extended_node_dict, target_version, PREFLIGHT_VALIDATIONS_SCRIPT_NAME, python_location_dict, @@ -423,10 +433,10 @@ def run_all_preflight_validations(agentless_gw_extended_nodes, dsf_hub_extended_ return gws_preflight_validations_passed and hub_preflight_validations_passed -def run_preflight_validations_for_extended_nodes(extended_nodes, target_version, script_file_name, +def run_preflight_validations_for_extended_nodes(extended_node_dict, target_version, script_file_name, python_location_dict, stop_on_failure, 
upgrade_status_service): all_success_or_skip = True - for extended_node in extended_nodes: + for extended_node in extended_node_dict.values(): success_or_skip = maybe_run_preflight_validations_for_extended_node(extended_node, target_version, script_file_name, python_location_dict, stop_on_failure, upgrade_status_service) From 2bcbb5567caf51f7a3d8490d46b2c2b1a4489000 Mon Sep 17 00:00:00 2001 From: "linda.nasredin" Date: Thu, 16 Nov 2023 14:36:30 +0200 Subject: [PATCH 12/12] EDSF-400 Refactor to use the extended node already created in previous steps --- .../python_upgrader/upgrade/main.py | 170 ++++++++---------- 1 file changed, 71 insertions(+), 99 deletions(-) diff --git a/modules/aws/sonar-upgrader/python_upgrader/upgrade/main.py b/modules/aws/sonar-upgrader/python_upgrader/upgrade/main.py index 1f31a5882..899a6675f 100644 --- a/modules/aws/sonar-upgrader/python_upgrader/upgrade/main.py +++ b/modules/aws/sonar-upgrader/python_upgrader/upgrade/main.py @@ -18,6 +18,8 @@ POSTFLIGHT_VALIDATIONS_SCRIPT_NAME = "run_postflight_validations.py" CLEAN_OLD_DEPLOYMENTS_SCRIPT_NAME = "clean_old_deployments.sh" +UNDEFINED_PYTHON_LOCATION = "UNDEFINED_PYTHON_LOCATION" + # Globals _connection_timeout = None _run_dummy_upgrade = False @@ -144,7 +146,8 @@ def create_extended_node(dsf_node, dsf_node_id, dsf_node_name): return { "dsf_node": dsf_node, "dsf_node_id": dsf_node_id, - "dsf_node_name": dsf_node_name + "dsf_node_name": dsf_node_name, + "python_location": UNDEFINED_PYTHON_LOCATION # Will be filled later } @@ -275,12 +278,12 @@ def run_upgrade_stages(args, extended_node_dict, agentless_gw_extended_node_dict """ run_test_connection_stage(args, extended_node_dict, upgrade_status_service) - python_location_dict = run_collect_python_location_step(args, extended_node_dict, upgrade_status_service) + run_collect_python_location_step(args, extended_node_dict, upgrade_status_service) run_preflight_validations_stage(args, agentless_gw_extended_node_dict, dsf_hub_extended_node_dict, - python_location_dict, upgrade_status_service) + upgrade_status_service) - run_upgrade_and_post_upgrade_stages(args, agentless_gws, hubs, tarball_location, python_location_dict, + run_upgrade_and_post_upgrade_stages(args, agentless_gws, hubs, extended_node_dict, tarball_location, upgrade_status_service) @@ -292,30 +295,29 @@ def run_test_connection_stage(args, extended_node_dict, upgrade_status_service): def run_collect_python_location_step(args, extended_node_dict, upgrade_status_service): - python_location_dict = {} + """ + Collects the Python location in the DSF nodes and fills it in the extended_node_dict + """ if should_run_python(args): - python_location_dict = collect_python_locations(extended_node_dict, args.stop_on_failure, - upgrade_status_service) - return python_location_dict + collect_python_locations(extended_node_dict, args.stop_on_failure, upgrade_status_service) def run_preflight_validations_stage(args, agentless_gw_extended_node_dict, dsf_hub_extended_node_dict, - python_location_dict, upgrade_status_service): + upgrade_status_service): if args.run_preflight_validations: preflight_validations_passed = run_all_preflight_validations(agentless_gw_extended_node_dict, dsf_hub_extended_node_dict, args.target_version, - python_location_dict, args.stop_on_failure, - upgrade_status_service) + args.stop_on_failure, upgrade_status_service) if preflight_validations_passed: print(f"### Preflight validations passed for all DSF nodes") -def run_upgrade_and_post_upgrade_stages(args, agentless_gws, hubs, tarball_location, 
python_location_dict, +def run_upgrade_and_post_upgrade_stages(args, agentless_gws, hubs, extended_node_dict, tarball_location, upgrade_status_service): if args.run_upgrade or args.run_postflight_validations or args.clean_old_deployments: - success = maybe_upgrade_and_postflight(agentless_gws, hubs, args.target_version, args.run_upgrade, - args.run_postflight_validations, args.clean_old_deployments, - python_location_dict, args.stop_on_failure, tarball_location, + success = maybe_upgrade_and_postflight(agentless_gws, hubs, extended_node_dict, args.target_version, + args.run_upgrade, args.run_postflight_validations, + args.clean_old_deployments, args.stop_on_failure, tarball_location, upgrade_status_service) print_upgrade_result = args.run_upgrade print_postflight_result = not args.run_upgrade and args.run_postflight_validations @@ -382,12 +384,9 @@ def should_run_python(args): def collect_python_locations(extended_node_dict, stop_on_failure, upgrade_status_service): print("----- Collect Python location") - python_location_dict = {} for extended_node in extended_node_dict.values(): python_location = maybe_collect_python_location(extended_node, stop_on_failure, upgrade_status_service) - if python_location is not None: - python_location_dict[extended_node.get('dsf_node_id')] = python_location - return python_location_dict + extended_node['python_location'] = python_location def maybe_collect_python_location(extended_node, stop_on_failure, upgrade_status_service): @@ -415,65 +414,50 @@ def collect_python_location(extended_node, stop_on_failure, upgrade_status_servi def run_all_preflight_validations(agentless_gw_extended_node_dict, dsf_hub_extended_node_dict, target_version, - python_location_dict, stop_on_failure, upgrade_status_service): + stop_on_failure, upgrade_status_service): print("----- Preflight validations") gws_preflight_validations_passed = run_preflight_validations_for_extended_nodes(agentless_gw_extended_node_dict, target_version, PREFLIGHT_VALIDATIONS_SCRIPT_NAME, - python_location_dict, stop_on_failure, upgrade_status_service) hub_preflight_validations_passed = run_preflight_validations_for_extended_nodes(dsf_hub_extended_node_dict, target_version, PREFLIGHT_VALIDATIONS_SCRIPT_NAME, - python_location_dict, stop_on_failure, upgrade_status_service) return gws_preflight_validations_passed and hub_preflight_validations_passed -def run_preflight_validations_for_extended_nodes(extended_node_dict, target_version, script_file_name, - python_location_dict, stop_on_failure, upgrade_status_service): +def run_preflight_validations_for_extended_nodes(extended_node_dict, target_version, script_file_name, stop_on_failure, + upgrade_status_service): all_success_or_skip = True for extended_node in extended_node_dict.values(): success_or_skip = maybe_run_preflight_validations_for_extended_node(extended_node, target_version, - script_file_name, python_location_dict, - stop_on_failure, upgrade_status_service) + script_file_name, stop_on_failure, + upgrade_status_service) all_success_or_skip = all_success_or_skip and success_or_skip return all_success_or_skip -def maybe_run_preflight_validations_for_extended_node(extended_node, target_version, script_file_name, - python_location_dict, stop_on_failure, upgrade_status_service): +def maybe_run_preflight_validations_for_extended_node(extended_node, target_version, script_file_name, stop_on_failure, + upgrade_status_service): if upgrade_status_service.should_run_preflight_validations(extended_node.get('dsf_node_id')): return 
run_preflight_validations_for_extended_node(extended_node, target_version, script_file_name, - python_location_dict, stop_on_failure, - upgrade_status_service) + stop_on_failure, upgrade_status_service) return True -def run_preflight_validations_for_extended_node(extended_node, target_version, script_file_name, python_location_dict, - stop_on_failure, upgrade_status_service): - python_location = python_location_dict[extended_node.get('dsf_node_id')] - # TODO this will happen only in case of bug, do we really need it? - if python_location is None: - print(f"Python location not found in dictionary for {extended_node.get('dsf_node_id')}") - upgrade_status_service.update_upgrade_status(extended_node.get('dsf_node_id'), - UpgradeStatus.PREFLIGHT_VALIDATIONS_FAILED, - "Python location not found") - if stop_on_failure: - raise UpgradeException(f"Python location not found in dictionary for {extended_node.get('dsf_node_id')}") - else: - return False - +def run_preflight_validations_for_extended_node(extended_node, target_version, script_file_name, stop_on_failure, + upgrade_status_service): error_message = None try: upgrade_status_service.update_upgrade_status(extended_node.get('dsf_node_id'), UpgradeStatus.RUNNING_PREFLIGHT_VALIDATIONS) preflight_validations_result = run_preflight_validations(extended_node.get('dsf_node'), extended_node.get('dsf_node_name'), target_version, - script_file_name, python_location) + script_file_name, extended_node.get('python_location')) if are_preflight_validations_passed(preflight_validations_result): print(f"### Preflight validations passed for {extended_node.get('dsf_node_name')}") upgrade_status_service.update_upgrade_status(extended_node.get('dsf_node_id'), @@ -557,53 +541,55 @@ def are_preflight_validations_passed(preflight_validations_result): and preflight_validations_result.get('enough_free_disk_space') -def maybe_upgrade_and_postflight(agentless_gws, hubs, target_version, run_upgrade, run_postflight_validations, - clean_old_deployments, python_location_dict, stop_on_failure, tarball_location, - upgrade_status_service): +# Used do_run_postflight_validations since there is a function called run_postflight_validations +def maybe_upgrade_and_postflight(agentless_gws, hubs, extended_node_dict, target_version, run_upgrade, + do_run_postflight_validations, clean_old_deployments, stop_on_failure, + tarball_location, upgrade_status_service): if run_upgrade: print("----- Upgrade") gws_upgrade_and_postflight_succeeded = maybe_upgrade_and_postflight_hadr_sets(agentless_gws, "Agentless Gateway", - target_version, UPGRADE_SCRIPT_NAME, + extended_node_dict, + target_version, + UPGRADE_SCRIPT_NAME, run_upgrade, - run_postflight_validations, + do_run_postflight_validations, POSTFLIGHT_VALIDATIONS_SCRIPT_NAME, clean_old_deployments, CLEAN_OLD_DEPLOYMENTS_SCRIPT_NAME, - python_location_dict, stop_on_failure, tarball_location, upgrade_status_service) - hub_upgrade_and_postflight_succeeded = maybe_upgrade_and_postflight_hadr_sets(hubs, "DSF Hub", target_version, + hub_upgrade_and_postflight_succeeded = maybe_upgrade_and_postflight_hadr_sets(hubs, "DSF Hub", + extended_node_dict, + target_version, UPGRADE_SCRIPT_NAME, run_upgrade, - run_postflight_validations, + do_run_postflight_validations, POSTFLIGHT_VALIDATIONS_SCRIPT_NAME, clean_old_deployments, CLEAN_OLD_DEPLOYMENTS_SCRIPT_NAME, - python_location_dict, stop_on_failure, tarball_location, upgrade_status_service) return gws_upgrade_and_postflight_succeeded and hub_upgrade_and_postflight_succeeded -# Used 
do_run_postflight_validations since there is a function called run_postflight_validations -def maybe_upgrade_and_postflight_hadr_sets(hadr_sets, dsf_node_type, target_version, upgrade_script_file_name, - run_upgrade, do_run_postflight_validations, +def maybe_upgrade_and_postflight_hadr_sets(hadr_sets, dsf_node_type, extended_node_dict, target_version, + upgrade_script_file_name, run_upgrade, do_run_postflight_validations, postflight_validations_script_file_name, clean_old_deployments, - clean_old_deployments_script_file_name, python_location_dict, + clean_old_deployments_script_file_name, stop_on_failure, tarball_location, upgrade_status_service): all_success_or_skip = True for hadr_set in hadr_sets: - succeed_or_skipped = maybe_upgrade_and_postflight_hadr_set(hadr_set, dsf_node_type, target_version, + succeed_or_skipped = maybe_upgrade_and_postflight_hadr_set(hadr_set, dsf_node_type, extended_node_dict, + target_version, upgrade_script_file_name, run_upgrade, do_run_postflight_validations, postflight_validations_script_file_name, clean_old_deployments, clean_old_deployments_script_file_name, - python_location_dict, stop_on_failure, tarball_location, upgrade_status_service) @@ -611,27 +597,27 @@ def maybe_upgrade_and_postflight_hadr_sets(hadr_sets, dsf_node_type, target_vers return all_success_or_skip -def maybe_upgrade_and_postflight_hadr_set(hadr_set, dsf_node_type, target_version, upgrade_script_file_name, - run_upgrade, do_run_postflight_validations, +def maybe_upgrade_and_postflight_hadr_set(hadr_set, dsf_node_type, extended_node_dict, target_version, + upgrade_script_file_name, run_upgrade, do_run_postflight_validations, postflight_validations_script_file_name, clean_old_deployments, - clean_old_deployments_script_file_name, python_location_dict, + clean_old_deployments_script_file_name, stop_on_failure, tarball_location, upgrade_status_service): print(f"Checking if running upgrade and/or postflight validations is required for {dsf_node_type} set") - if maybe_upgrade_and_postflight_dsf_node(hadr_set.get('minor'), dsf_node_type, 'Minor', target_version, + if maybe_upgrade_and_postflight_dsf_node(hadr_set.get('minor'), extended_node_dict, target_version, upgrade_script_file_name, run_upgrade, do_run_postflight_validations, postflight_validations_script_file_name, clean_old_deployments, - clean_old_deployments_script_file_name, python_location_dict, + clean_old_deployments_script_file_name, stop_on_failure, tarball_location, upgrade_status_service): - if maybe_upgrade_and_postflight_dsf_node(hadr_set.get('dr'), dsf_node_type, 'DR', target_version, + if maybe_upgrade_and_postflight_dsf_node(hadr_set.get('dr'), extended_node_dict, target_version, upgrade_script_file_name, run_upgrade, do_run_postflight_validations, postflight_validations_script_file_name, clean_old_deployments, - clean_old_deployments_script_file_name, python_location_dict, + clean_old_deployments_script_file_name, stop_on_failure, tarball_location, upgrade_status_service): - if maybe_upgrade_and_postflight_dsf_node(hadr_set.get('main'), dsf_node_type, 'Main', target_version, + if maybe_upgrade_and_postflight_dsf_node(hadr_set.get('main'), extended_node_dict, target_version, upgrade_script_file_name, run_upgrade, do_run_postflight_validations, postflight_validations_script_file_name, clean_old_deployments, - clean_old_deployments_script_file_name, python_location_dict, + clean_old_deployments_script_file_name, stop_on_failure, tarball_location, upgrade_status_service): return True else: @@ -641,17 +627,15 @@ def 
maybe_upgrade_and_postflight_hadr_set(hadr_set, dsf_node_type, target_versio return False -def maybe_upgrade_and_postflight_dsf_node(dsf_node, dsf_node_type, hadr_node_type_name, target_version, +def maybe_upgrade_and_postflight_dsf_node(dsf_node, extended_node_dict, target_version, upgrade_script_file_name, run_upgrade, do_run_postflight_validations, postflight_validations_script_file_name, clean_old_deployments, - clean_old_deployments_script_file_name, python_location_dict, + clean_old_deployments_script_file_name, stop_on_failure, tarball_location, upgrade_status_service): if dsf_node is None: return True - # TODO refactor to use the extended node already created in previous steps dsf_node_id = generate_dsf_node_id(dsf_node) - dsf_node_name = generate_dsf_node_name(dsf_node_type, hadr_node_type_name, dsf_node_id) - extended_node = create_extended_node(dsf_node, dsf_node_id, dsf_node_name) + extended_node = extended_node_dict[dsf_node_id] if run_upgrade: upgrade_success_or_skip = maybe_upgrade_dsf_node(extended_node, target_version, upgrade_script_file_name, stop_on_failure, tarball_location, upgrade_status_service) @@ -661,18 +645,17 @@ def maybe_upgrade_and_postflight_dsf_node(dsf_node, dsf_node_type, hadr_node_typ if do_run_postflight_validations: postflight_success_or_skip = maybe_run_postflight_validations(extended_node, target_version, postflight_validations_script_file_name, - python_location_dict, stop_on_failure, - upgrade_status_service) + stop_on_failure, upgrade_status_service) if not postflight_success_or_skip: return False if clean_old_deployments: # TODO add status support when clean_old_deployments will be supported - clean_old_deployments_succeeded = run_clean_old_deployments(dsf_node, dsf_node_name, + clean_old_deployments_succeeded = run_clean_old_deployments(dsf_node, extended_node.get('dsf_node_name'), clean_old_deployments_script_file_name) if not clean_old_deployments_succeeded: # In case clean old deployments failed, print a warning without returning false - print(f"### Warning: Cleaning old deployments failed for {dsf_node_name}") + print(f"### Warning: Cleaning old deployments failed for {extended_node.get('dsf_node_name')}") return True @@ -752,40 +735,29 @@ def get_tarball_s3_key(target_version): return f"jsonar-{target_version}.tar.gz" -def maybe_run_postflight_validations(extended_node, target_version, script_file_name, python_location_dict, - stop_on_failure, upgrade_status_service): +def maybe_run_postflight_validations(extended_node, target_version, script_file_name, stop_on_failure, + upgrade_status_service): if upgrade_status_service.should_run_postflight_validations(extended_node.get('dsf_node_id')): - return run_postflight_validations(extended_node, target_version, script_file_name, python_location_dict, - stop_on_failure, upgrade_status_service) + return run_postflight_validations(extended_node, target_version, script_file_name, stop_on_failure, + upgrade_status_service) return True -def run_postflight_validations(extended_node, target_version, script_file_name, python_location_dict, - stop_on_failure, upgrade_status_service): - python_location = python_location_dict[extended_node.get('dsf_node_id')] - # TODO this will happen only in case of bug, do we really need it? 
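The defensive lookup removed just below becomes unnecessary once each extended node carries its own python_location. The following sketch uses hypothetical host and path values and mirrors the record shape this commit adds to create_extended_node.

UNDEFINED_PYTHON_LOCATION = "UNDEFINED_PYTHON_LOCATION"

def create_extended_node(dsf_node, dsf_node_id, dsf_node_name):
    # The sentinel marks nodes whose Python location has not been collected yet
    return {
        "dsf_node": dsf_node,
        "dsf_node_id": dsf_node_id,
        "dsf_node_name": dsf_node_name,
        "python_location": UNDEFINED_PYTHON_LOCATION,
    }

node = create_extended_node({"host": "10.0.0.5"}, "hub-1", "DSF Hub, Main, hub-1")  # hypothetical node
node["python_location"] = "/usr/bin/python3"  # value the collect step would fill in

# Consumers read the location off the record itself: no parallel dict, no KeyError
assert node.get("python_location") == "/usr/bin/python3"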
- if python_location is None: - print(f"Python location not found in dictionary for {extended_node.get('dsf_node_id')}") - upgrade_status_service.update_upgrade_status(extended_node.get('dsf_node_id'), - UpgradeStatus.POSTFLIGHT_VALIDATIONS_FAILED, - "Python location not found") - if stop_on_failure: - raise UpgradeException(f"Python location not found in dictionary for {extended_node.get('dsf_node_id')}") - else: - return False - +def run_postflight_validations(extended_node, target_version, script_file_name, stop_on_failure, + upgrade_status_service): print(f"Running postflight validations for {extended_node.get('dsf_node_name')}") - print(f"Python location (taken from dictionary) in {extended_node.get('dsf_node_name')} is {python_location}") error_message = None try: upgrade_status_service.update_upgrade_status(extended_node.get('dsf_node_id'), UpgradeStatus.RUNNING_POSTFLIGHT_VALIDATIONS) postflight_validations_result_json = run_postflight_validations_script(extended_node.get('dsf_node'), - target_version, python_location, + target_version, + extended_node.get('python_location'), script_file_name) postflight_validations_result = json.loads(postflight_validations_result_json) - print(f"Postflight validations result in {extended_node.get('dsf_node_name')} is {postflight_validations_result}") + print(f"Postflight validations result in {extended_node.get('dsf_node_name')} is " + f"{postflight_validations_result}") passed = are_postflight_validations_passed(postflight_validations_result) if passed:
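For context on the flow above, here is a condensed, self-contained sketch of how a postflight result travels from the remote validation script into this code path; the JSON field name is illustrative rather than the script's actual schema.

import json

def are_postflight_validations_passed(result):
    # Hypothetical single-field criterion; the real check inspects the script's own fields
    return bool(result.get("correct_version"))

# The remote script prints its result as JSON; the upgrader parses and evaluates it
postflight_validations_result_json = '{"correct_version": true}'
postflight_validations_result = json.loads(postflight_validations_result_json)
assert are_postflight_validations_passed(postflight_validations_result)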